{
  "results": {
    "harness|ko_arc_challenge|25": {
      "acc": 0.20563139931740615,
      "acc_stderr": 0.011810745260742581,
      "acc_norm": 0.24573378839590443,
      "acc_norm_stderr": 0.012581033453730107
    },
    "harness|ko_hellaswag|10": {
      "acc": 0.28540131447918743,
      "acc_stderr": 0.0045068240943332985,
      "acc_norm": 0.3209520015933081,
      "acc_norm_stderr": 0.004658882929099508
    },
    "harness|ko_mmlu_world_religions|5": {
      "acc": 0.30409356725146197,
      "acc_stderr": 0.03528211258245233,
      "acc_norm": 0.30409356725146197,
      "acc_norm_stderr": 0.03528211258245233
    },
    "harness|ko_mmlu_management|5": {
      "acc": 0.17475728155339806,
      "acc_stderr": 0.037601780060266196,
      "acc_norm": 0.17475728155339806,
      "acc_norm_stderr": 0.037601780060266196
    },
    "harness|ko_mmlu_miscellaneous|5": {
      "acc": 0.26181353767560667,
      "acc_stderr": 0.015720838678445266,
      "acc_norm": 0.26181353767560667,
      "acc_norm_stderr": 0.015720838678445266
    },
    "harness|ko_mmlu_anatomy|5": {
      "acc": 0.3037037037037037,
      "acc_stderr": 0.039725528847851375,
      "acc_norm": 0.3037037037037037,
      "acc_norm_stderr": 0.039725528847851375
    },
    "harness|ko_mmlu_abstract_algebra|5": {
      "acc": 0.25,
      "acc_stderr": 0.04351941398892446,
      "acc_norm": 0.25,
      "acc_norm_stderr": 0.04351941398892446
    },
    "harness|ko_mmlu_conceptual_physics|5": {
      "acc": 0.22127659574468084,
      "acc_stderr": 0.02713634960242405,
      "acc_norm": 0.22127659574468084,
      "acc_norm_stderr": 0.02713634960242405
    },
    "harness|ko_mmlu_virology|5": {
      "acc": 0.2891566265060241,
      "acc_stderr": 0.035294868015111155,
      "acc_norm": 0.2891566265060241,
      "acc_norm_stderr": 0.035294868015111155
    },
    "harness|ko_mmlu_philosophy|5": {
      "acc": 0.2733118971061093,
      "acc_stderr": 0.025311765975426115,
      "acc_norm": 0.2733118971061093,
      "acc_norm_stderr": 0.025311765975426115
    },
    "harness|ko_mmlu_human_aging|5": {
      "acc": 0.242152466367713,
      "acc_stderr": 0.028751392398694755,
      "acc_norm": 0.242152466367713,
      "acc_norm_stderr": 0.028751392398694755
    },
    "harness|ko_mmlu_human_sexuality|5": {
      "acc": 0.2900763358778626,
      "acc_stderr": 0.03980066246467766,
      "acc_norm": 0.2900763358778626,
      "acc_norm_stderr": 0.03980066246467766
    },
    "harness|ko_mmlu_medical_genetics|5": {
      "acc": 0.3,
      "acc_stderr": 0.046056618647183814,
      "acc_norm": 0.3,
      "acc_norm_stderr": 0.046056618647183814
    },
    "harness|ko_mmlu_high_school_geography|5": {
      "acc": 0.19696969696969696,
      "acc_stderr": 0.02833560973246335,
      "acc_norm": 0.19696969696969696,
      "acc_norm_stderr": 0.02833560973246335
    },
    "harness|ko_mmlu_electrical_engineering|5": {
      "acc": 0.2,
      "acc_stderr": 0.0333333333333333,
      "acc_norm": 0.2,
      "acc_norm_stderr": 0.0333333333333333
    },
    "harness|ko_mmlu_college_physics|5": {
      "acc": 0.22549019607843138,
      "acc_stderr": 0.04158307533083286,
      "acc_norm": 0.22549019607843138,
      "acc_norm_stderr": 0.04158307533083286
    },
    "harness|ko_mmlu_high_school_microeconomics|5": {
      "acc": 0.28991596638655465,
      "acc_stderr": 0.029472485833136098,
      "acc_norm": 0.28991596638655465,
      "acc_norm_stderr": 0.029472485833136098
    },
    "harness|ko_mmlu_high_school_macroeconomics|5": {
      "acc": 0.34615384615384615,
      "acc_stderr": 0.024121125416941187,
      "acc_norm": 0.34615384615384615,
      "acc_norm_stderr": 0.024121125416941187
    },
    "harness|ko_mmlu_computer_security|5": {
      "acc": 0.2,
      "acc_stderr": 0.04020151261036844,
      "acc_norm": 0.2,
      "acc_norm_stderr": 0.04020151261036844
    },
    "harness|ko_mmlu_global_facts|5": {
      "acc": 0.31,
      "acc_stderr": 0.04648231987117316,
      "acc_norm": 0.31,
      "acc_norm_stderr": 0.04648231987117316
    },
    "harness|ko_mmlu_jurisprudence|5": {
      "acc": 0.25,
      "acc_stderr": 0.04186091791394607,
      "acc_norm": 0.25,
      "acc_norm_stderr": 0.04186091791394607
    },
    "harness|ko_mmlu_high_school_chemistry|5": {
      "acc": 0.18719211822660098,
      "acc_stderr": 0.027444924966882618,
      "acc_norm": 0.18719211822660098,
      "acc_norm_stderr": 0.027444924966882618
    },
    "harness|ko_mmlu_high_school_biology|5": {
      "acc": 0.2645161290322581,
      "acc_stderr": 0.02509189237885928,
      "acc_norm": 0.2645161290322581,
      "acc_norm_stderr": 0.02509189237885928
    },
    "harness|ko_mmlu_marketing|5": {
      "acc": 0.19658119658119658,
      "acc_stderr": 0.02603538609895129,
      "acc_norm": 0.19658119658119658,
      "acc_norm_stderr": 0.02603538609895129
    },
    "harness|ko_mmlu_clinical_knowledge|5": {
      "acc": 0.2,
      "acc_stderr": 0.024618298195866507,
      "acc_norm": 0.2,
      "acc_norm_stderr": 0.024618298195866507
    },
    "harness|ko_mmlu_public_relations|5": {
      "acc": 0.21818181818181817,
      "acc_stderr": 0.03955932861795833,
      "acc_norm": 0.21818181818181817,
      "acc_norm_stderr": 0.03955932861795833
    },
    "harness|ko_mmlu_high_school_mathematics|5": {
      "acc": 0.3,
      "acc_stderr": 0.02794045713622841,
      "acc_norm": 0.3,
      "acc_norm_stderr": 0.02794045713622841
    },
    "harness|ko_mmlu_high_school_physics|5": {
      "acc": 0.33112582781456956,
      "acc_stderr": 0.038425817186598696,
      "acc_norm": 0.33112582781456956,
      "acc_norm_stderr": 0.038425817186598696
    },
    "harness|ko_mmlu_sociology|5": {
      "acc": 0.24875621890547264,
      "acc_stderr": 0.030567675938916707,
      "acc_norm": 0.24875621890547264,
      "acc_norm_stderr": 0.030567675938916707
    },
    "harness|ko_mmlu_college_medicine|5": {
      "acc": 0.3179190751445087,
      "acc_stderr": 0.03550683989165581,
      "acc_norm": 0.3179190751445087,
      "acc_norm_stderr": 0.03550683989165581
    },
    "harness|ko_mmlu_elementary_mathematics|5": {
      "acc": 0.2619047619047619,
      "acc_stderr": 0.022644212615525218,
      "acc_norm": 0.2619047619047619,
      "acc_norm_stderr": 0.022644212615525218
    },
    "harness|ko_mmlu_college_biology|5": {
      "acc": 0.22916666666666666,
      "acc_stderr": 0.03514697467862388,
      "acc_norm": 0.22916666666666666,
      "acc_norm_stderr": 0.03514697467862388
    },
    "harness|ko_mmlu_college_chemistry|5": {
      "acc": 0.24,
      "acc_stderr": 0.04292346959909282,
      "acc_norm": 0.24,
      "acc_norm_stderr": 0.04292346959909282
    },
    "harness|ko_mmlu_us_foreign_policy|5": {
      "acc": 0.21,
      "acc_stderr": 0.040936018074033256,
      "acc_norm": 0.21,
      "acc_norm_stderr": 0.040936018074033256
    },
    "harness|ko_mmlu_moral_disputes|5": {
      "acc": 0.2543352601156069,
      "acc_stderr": 0.023445826276545543,
      "acc_norm": 0.2543352601156069,
      "acc_norm_stderr": 0.023445826276545543
    },
    "harness|ko_mmlu_logical_fallacies|5": {
      "acc": 0.22699386503067484,
      "acc_stderr": 0.032910995786157714,
      "acc_norm": 0.22699386503067484,
      "acc_norm_stderr": 0.032910995786157714
    },
    "harness|ko_mmlu_prehistory|5": {
      "acc": 0.2345679012345679,
      "acc_stderr": 0.023576881744005723,
      "acc_norm": 0.2345679012345679,
      "acc_norm_stderr": 0.023576881744005723
    },
    "harness|ko_mmlu_college_mathematics|5": {
      "acc": 0.28,
      "acc_stderr": 0.045126085985421296,
      "acc_norm": 0.28,
      "acc_norm_stderr": 0.045126085985421296
    },
    "harness|ko_mmlu_high_school_government_and_politics|5": {
      "acc": 0.2694300518134715,
      "acc_stderr": 0.032018671228777947,
      "acc_norm": 0.2694300518134715,
      "acc_norm_stderr": 0.032018671228777947
    },
    "harness|ko_mmlu_econometrics|5": {
      "acc": 0.21929824561403508,
      "acc_stderr": 0.03892431106518754,
      "acc_norm": 0.21929824561403508,
      "acc_norm_stderr": 0.03892431106518754
    },
    "harness|ko_mmlu_high_school_psychology|5": {
      "acc": 0.23853211009174313,
      "acc_stderr": 0.01827257581023186,
      "acc_norm": 0.23853211009174313,
      "acc_norm_stderr": 0.01827257581023186
    },
    "harness|ko_mmlu_formal_logic|5": {
      "acc": 0.23015873015873015,
      "acc_stderr": 0.03764950879790606,
      "acc_norm": 0.23015873015873015,
      "acc_norm_stderr": 0.03764950879790606
    },
    "harness|ko_mmlu_nutrition|5": {
      "acc": 0.21241830065359477,
      "acc_stderr": 0.02342037547829613,
      "acc_norm": 0.21241830065359477,
      "acc_norm_stderr": 0.02342037547829613
    },
    "harness|ko_mmlu_business_ethics|5": {
      "acc": 0.32,
      "acc_stderr": 0.04688261722621505,
      "acc_norm": 0.32,
      "acc_norm_stderr": 0.04688261722621505
    },
    "harness|ko_mmlu_international_law|5": {
      "acc": 0.24793388429752067,
      "acc_stderr": 0.03941897526516303,
      "acc_norm": 0.24793388429752067,
      "acc_norm_stderr": 0.03941897526516303
    },
    "harness|ko_mmlu_astronomy|5": {
      "acc": 0.17763157894736842,
      "acc_stderr": 0.03110318238312338,
      "acc_norm": 0.17763157894736842,
      "acc_norm_stderr": 0.03110318238312338
    },
    "harness|ko_mmlu_professional_psychology|5": {
      "acc": 0.24836601307189543,
      "acc_stderr": 0.01747948700136476,
      "acc_norm": 0.24836601307189543,
      "acc_norm_stderr": 0.01747948700136476
    },
    "harness|ko_mmlu_professional_accounting|5": {
      "acc": 0.24468085106382978,
      "acc_stderr": 0.025645553622266726,
      "acc_norm": 0.24468085106382978,
      "acc_norm_stderr": 0.025645553622266726
    },
    "harness|ko_mmlu_machine_learning|5": {
      "acc": 0.17857142857142858,
      "acc_stderr": 0.036352091215778065,
      "acc_norm": 0.17857142857142858,
      "acc_norm_stderr": 0.036352091215778065
    },
    "harness|ko_mmlu_high_school_statistics|5": {
      "acc": 0.4305555555555556,
      "acc_stderr": 0.03376922151252336,
      "acc_norm": 0.4305555555555556,
      "acc_norm_stderr": 0.03376922151252336
    },
    "harness|ko_mmlu_moral_scenarios|5": {
      "acc": 0.24916201117318434,
      "acc_stderr": 0.014465893829859919,
      "acc_norm": 0.24916201117318434,
      "acc_norm_stderr": 0.014465893829859919
    },
    "harness|ko_mmlu_college_computer_science|5": {
      "acc": 0.33,
      "acc_stderr": 0.04725815626252604,
      "acc_norm": 0.33,
      "acc_norm_stderr": 0.04725815626252604
    },
    "harness|ko_mmlu_high_school_computer_science|5": {
      "acc": 0.27,
      "acc_stderr": 0.0446196043338474,
      "acc_norm": 0.27,
      "acc_norm_stderr": 0.0446196043338474
    },
    "harness|ko_mmlu_professional_medicine|5": {
      "acc": 0.41544117647058826,
      "acc_stderr": 0.02993534270787775,
      "acc_norm": 0.41544117647058826,
      "acc_norm_stderr": 0.02993534270787775
    },
    "harness|ko_mmlu_security_studies|5": {
      "acc": 0.2,
      "acc_stderr": 0.02560737598657916,
      "acc_norm": 0.2,
      "acc_norm_stderr": 0.02560737598657916
    },
    "harness|ko_mmlu_high_school_world_history|5": {
      "acc": 0.270042194092827,
      "acc_stderr": 0.028900721906293426,
      "acc_norm": 0.270042194092827,
      "acc_norm_stderr": 0.028900721906293426
    },
    "harness|ko_mmlu_professional_law|5": {
      "acc": 0.242503259452412,
      "acc_stderr": 0.010946570966348783,
      "acc_norm": 0.242503259452412,
      "acc_norm_stderr": 0.010946570966348783
    },
    "harness|ko_mmlu_high_school_us_history|5": {
      "acc": 0.27450980392156865,
      "acc_stderr": 0.03132179803083292,
      "acc_norm": 0.27450980392156865,
      "acc_norm_stderr": 0.03132179803083292
    },
    "harness|ko_mmlu_high_school_european_history|5": {
      "acc": 0.24242424242424243,
      "acc_stderr": 0.033464098810559534,
      "acc_norm": 0.24242424242424243,
      "acc_norm_stderr": 0.033464098810559534
    },
    "harness|ko_truthfulqa_mc|0": {
      "mc1": 0.2533659730722154,
      "mc1_stderr": 0.01522589934082682,
      "mc2": 0.4666916578437702,
      "mc2_stderr": 0.015201094715829425
    },
    "harness|ko_commongen_v2|2": {
      "acc": 0.3412042502951594,
      "acc_stderr": 0.016300368742137302,
      "acc_norm": 0.48760330578512395,
      "acc_norm_stderr": 0.017185069732676517
    }
  },
  "versions": {
    "all": 0,
    "harness|ko_arc_challenge|25": 0,
    "harness|ko_hellaswag|10": 0,
    "harness|ko_mmlu_world_religions|5": 1,
    "harness|ko_mmlu_management|5": 1,
    "harness|ko_mmlu_miscellaneous|5": 1,
    "harness|ko_mmlu_anatomy|5": 1,
    "harness|ko_mmlu_abstract_algebra|5": 1,
    "harness|ko_mmlu_conceptual_physics|5": 1,
    "harness|ko_mmlu_virology|5": 1,
    "harness|ko_mmlu_philosophy|5": 1,
    "harness|ko_mmlu_human_aging|5": 1,
    "harness|ko_mmlu_human_sexuality|5": 1,
    "harness|ko_mmlu_medical_genetics|5": 1,
    "harness|ko_mmlu_high_school_geography|5": 1,
    "harness|ko_mmlu_electrical_engineering|5": 1,
    "harness|ko_mmlu_college_physics|5": 1,
    "harness|ko_mmlu_high_school_microeconomics|5": 1,
    "harness|ko_mmlu_high_school_macroeconomics|5": 1,
    "harness|ko_mmlu_computer_security|5": 1,
    "harness|ko_mmlu_global_facts|5": 1,
    "harness|ko_mmlu_jurisprudence|5": 1,
    "harness|ko_mmlu_high_school_chemistry|5": 1,
    "harness|ko_mmlu_high_school_biology|5": 1,
    "harness|ko_mmlu_marketing|5": 1,
    "harness|ko_mmlu_clinical_knowledge|5": 1,
    "harness|ko_mmlu_public_relations|5": 1,
    "harness|ko_mmlu_high_school_mathematics|5": 1,
    "harness|ko_mmlu_high_school_physics|5": 1,
    "harness|ko_mmlu_sociology|5": 1,
    "harness|ko_mmlu_college_medicine|5": 1,
    "harness|ko_mmlu_elementary_mathematics|5": 1,
    "harness|ko_mmlu_college_biology|5": 1,
    "harness|ko_mmlu_college_chemistry|5": 1,
    "harness|ko_mmlu_us_foreign_policy|5": 1,
    "harness|ko_mmlu_moral_disputes|5": 1,
    "harness|ko_mmlu_logical_fallacies|5": 1,
    "harness|ko_mmlu_prehistory|5": 1,
    "harness|ko_mmlu_college_mathematics|5": 1,
    "harness|ko_mmlu_high_school_government_and_politics|5": 1,
    "harness|ko_mmlu_econometrics|5": 1,
    "harness|ko_mmlu_high_school_psychology|5": 1,
    "harness|ko_mmlu_formal_logic|5": 1,
    "harness|ko_mmlu_nutrition|5": 1,
    "harness|ko_mmlu_business_ethics|5": 1,
    "harness|ko_mmlu_international_law|5": 1,
    "harness|ko_mmlu_astronomy|5": 1,
    "harness|ko_mmlu_professional_psychology|5": 1,
    "harness|ko_mmlu_professional_accounting|5": 1,
    "harness|ko_mmlu_machine_learning|5": 1,
    "harness|ko_mmlu_high_school_statistics|5": 1,
    "harness|ko_mmlu_moral_scenarios|5": 1,
    "harness|ko_mmlu_college_computer_science|5": 1,
    "harness|ko_mmlu_high_school_computer_science|5": 1,
    "harness|ko_mmlu_professional_medicine|5": 1,
    "harness|ko_mmlu_security_studies|5": 1,
    "harness|ko_mmlu_high_school_world_history|5": 1,
    "harness|ko_mmlu_professional_law|5": 1,
    "harness|ko_mmlu_high_school_us_history|5": 1,
    "harness|ko_mmlu_high_school_european_history|5": 1,
    "harness|ko_truthfulqa_mc|0": 0,
    "harness|ko_commongen_v2|2": 1
  },
  "config_general": {
    "model_name": "heegyu/kogpt-j-base",
    "model_sha": "212ebff345958e108fc47ae0daa892328ca6ece2",
    "model_dtype": "torch.float16",
    "lighteval_sha": "",
    "num_few_shot_default": 0,
    "num_fewshot_seeds": 1,
    "override_batch_size": 1,
    "max_samples": null
  }
}