results/AIFT/AIFT-instruct-42dot_LLM-SFT-1.3B/result_2024-01-30 00:16:56.json
{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.2721843003412969,
"acc_stderr": 0.013006600406423709,
"acc_norm": 0.33276450511945393,
"acc_norm_stderr": 0.013769863046192305
},
"harness|ko_hellaswag|10": {
"acc": 0.3543118900617407,
"acc_stderr": 0.004773267510112743,
"acc_norm": 0.4435371439952201,
"acc_norm_stderr": 0.004957863944093124
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.29239766081871343,
"acc_stderr": 0.03488647713457923,
"acc_norm": 0.29239766081871343,
"acc_norm_stderr": 0.03488647713457923
},
"harness|ko_mmlu_management|5": {
"acc": 0.2621359223300971,
"acc_stderr": 0.04354631077260597,
"acc_norm": 0.2621359223300971,
"acc_norm_stderr": 0.04354631077260597
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.21966794380587484,
"acc_stderr": 0.014805384478371169,
"acc_norm": 0.21966794380587484,
"acc_norm_stderr": 0.014805384478371169
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.2,
"acc_stderr": 0.03455473702325438,
"acc_norm": 0.2,
"acc_norm_stderr": 0.03455473702325438
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.26,
"acc_stderr": 0.04408440022768079,
"acc_norm": 0.26,
"acc_norm_stderr": 0.04408440022768079
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.23829787234042554,
"acc_stderr": 0.027851252973889774,
"acc_norm": 0.23829787234042554,
"acc_norm_stderr": 0.027851252973889774
},
"harness|ko_mmlu_virology|5": {
"acc": 0.2891566265060241,
"acc_stderr": 0.035294868015111155,
"acc_norm": 0.2891566265060241,
"acc_norm_stderr": 0.035294868015111155
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.26366559485530544,
"acc_stderr": 0.02502553850053234,
"acc_norm": 0.26366559485530544,
"acc_norm_stderr": 0.02502553850053234
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.21076233183856502,
"acc_stderr": 0.027373095500540193,
"acc_norm": 0.21076233183856502,
"acc_norm_stderr": 0.027373095500540193
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.26717557251908397,
"acc_stderr": 0.03880848301082395,
"acc_norm": 0.26717557251908397,
"acc_norm_stderr": 0.03880848301082395
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.29,
"acc_stderr": 0.045604802157206845,
"acc_norm": 0.29,
"acc_norm_stderr": 0.045604802157206845
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.18181818181818182,
"acc_stderr": 0.027479603010538804,
"acc_norm": 0.18181818181818182,
"acc_norm_stderr": 0.027479603010538804
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.22758620689655173,
"acc_stderr": 0.03493950380131184,
"acc_norm": 0.22758620689655173,
"acc_norm_stderr": 0.03493950380131184
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.2647058823529412,
"acc_stderr": 0.043898699568087785,
"acc_norm": 0.2647058823529412,
"acc_norm_stderr": 0.043898699568087785
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.226890756302521,
"acc_stderr": 0.027205371538279472,
"acc_norm": 0.226890756302521,
"acc_norm_stderr": 0.027205371538279472
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.19230769230769232,
"acc_stderr": 0.019982347208637296,
"acc_norm": 0.19230769230769232,
"acc_norm_stderr": 0.019982347208637296
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.25,
"acc_stderr": 0.04351941398892446,
"acc_norm": 0.25,
"acc_norm_stderr": 0.04351941398892446
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.25,
"acc_stderr": 0.04351941398892446,
"acc_norm": 0.25,
"acc_norm_stderr": 0.04351941398892446
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.26851851851851855,
"acc_stderr": 0.04284467968052192,
"acc_norm": 0.26851851851851855,
"acc_norm_stderr": 0.04284467968052192
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.2857142857142857,
"acc_stderr": 0.03178529710642749,
"acc_norm": 0.2857142857142857,
"acc_norm_stderr": 0.03178529710642749
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.31290322580645163,
"acc_stderr": 0.026377567028645854,
"acc_norm": 0.31290322580645163,
"acc_norm_stderr": 0.026377567028645854
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.2863247863247863,
"acc_stderr": 0.029614323690456648,
"acc_norm": 0.2863247863247863,
"acc_norm_stderr": 0.029614323690456648
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.23018867924528302,
"acc_stderr": 0.025907897122408173,
"acc_norm": 0.23018867924528302,
"acc_norm_stderr": 0.025907897122408173
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.32727272727272727,
"acc_stderr": 0.04494290866252089,
"acc_norm": 0.32727272727272727,
"acc_norm_stderr": 0.04494290866252089
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.23703703703703705,
"acc_stderr": 0.025928876132766107,
"acc_norm": 0.23703703703703705,
"acc_norm_stderr": 0.025928876132766107
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.2781456953642384,
"acc_stderr": 0.03658603262763743,
"acc_norm": 0.2781456953642384,
"acc_norm_stderr": 0.03658603262763743
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.24378109452736318,
"acc_stderr": 0.03036049015401464,
"acc_norm": 0.24378109452736318,
"acc_norm_stderr": 0.03036049015401464
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.2774566473988439,
"acc_stderr": 0.03414014007044036,
"acc_norm": 0.2774566473988439,
"acc_norm_stderr": 0.03414014007044036
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.24338624338624337,
"acc_stderr": 0.02210112878741543,
"acc_norm": 0.24338624338624337,
"acc_norm_stderr": 0.02210112878741543
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.25,
"acc_stderr": 0.03621034121889507,
"acc_norm": 0.25,
"acc_norm_stderr": 0.03621034121889507
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.3,
"acc_stderr": 0.046056618647183814,
"acc_norm": 0.3,
"acc_norm_stderr": 0.046056618647183814
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.25,
"acc_stderr": 0.04351941398892446,
"acc_norm": 0.25,
"acc_norm_stderr": 0.04351941398892446
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.2745664739884393,
"acc_stderr": 0.024027745155265016,
"acc_norm": 0.2745664739884393,
"acc_norm_stderr": 0.024027745155265016
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.22085889570552147,
"acc_stderr": 0.03259177392742178,
"acc_norm": 0.22085889570552147,
"acc_norm_stderr": 0.03259177392742178
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.26851851851851855,
"acc_stderr": 0.02465968518596728,
"acc_norm": 0.26851851851851855,
"acc_norm_stderr": 0.02465968518596728
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.28,
"acc_stderr": 0.04512608598542128,
"acc_norm": 0.28,
"acc_norm_stderr": 0.04512608598542128
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.21243523316062177,
"acc_stderr": 0.02951928261681725,
"acc_norm": 0.21243523316062177,
"acc_norm_stderr": 0.02951928261681725
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.2807017543859649,
"acc_stderr": 0.042270544512321984,
"acc_norm": 0.2807017543859649,
"acc_norm_stderr": 0.042270544512321984
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.1944954128440367,
"acc_stderr": 0.016970289090458054,
"acc_norm": 0.1944954128440367,
"acc_norm_stderr": 0.016970289090458054
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.30952380952380953,
"acc_stderr": 0.04134913018303316,
"acc_norm": 0.30952380952380953,
"acc_norm_stderr": 0.04134913018303316
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.28431372549019607,
"acc_stderr": 0.025829163272757468,
"acc_norm": 0.28431372549019607,
"acc_norm_stderr": 0.025829163272757468
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.24,
"acc_stderr": 0.04292346959909284,
"acc_norm": 0.24,
"acc_norm_stderr": 0.04292346959909284
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.34710743801652894,
"acc_stderr": 0.04345724570292534,
"acc_norm": 0.34710743801652894,
"acc_norm_stderr": 0.04345724570292534
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.26973684210526316,
"acc_stderr": 0.03611780560284898,
"acc_norm": 0.26973684210526316,
"acc_norm_stderr": 0.03611780560284898
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.25,
"acc_stderr": 0.01751781884501444,
"acc_norm": 0.25,
"acc_norm_stderr": 0.01751781884501444
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.23404255319148937,
"acc_stderr": 0.025257861359432414,
"acc_norm": 0.23404255319148937,
"acc_norm_stderr": 0.025257861359432414
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.20535714285714285,
"acc_stderr": 0.038342410214190714,
"acc_norm": 0.20535714285714285,
"acc_norm_stderr": 0.038342410214190714
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.4305555555555556,
"acc_stderr": 0.03376922151252335,
"acc_norm": 0.4305555555555556,
"acc_norm_stderr": 0.03376922151252335
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.27262569832402234,
"acc_stderr": 0.014893391735249608,
"acc_norm": 0.27262569832402234,
"acc_norm_stderr": 0.014893391735249608
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.3,
"acc_stderr": 0.046056618647183814,
"acc_norm": 0.3,
"acc_norm_stderr": 0.046056618647183814
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.21,
"acc_stderr": 0.040936018074033256,
"acc_norm": 0.21,
"acc_norm_stderr": 0.040936018074033256
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.34558823529411764,
"acc_stderr": 0.02888819310398865,
"acc_norm": 0.34558823529411764,
"acc_norm_stderr": 0.02888819310398865
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.19591836734693877,
"acc_stderr": 0.02540930195322568,
"acc_norm": 0.19591836734693877,
"acc_norm_stderr": 0.02540930195322568
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.3037974683544304,
"acc_stderr": 0.0299366963871386,
"acc_norm": 0.3037974683544304,
"acc_norm_stderr": 0.0299366963871386
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.2438070404172099,
"acc_stderr": 0.010966507972178475,
"acc_norm": 0.2438070404172099,
"acc_norm_stderr": 0.010966507972178475
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.2696078431372549,
"acc_stderr": 0.03114557065948678,
"acc_norm": 0.2696078431372549,
"acc_norm_stderr": 0.03114557065948678
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.23636363636363636,
"acc_stderr": 0.033175059300091805,
"acc_norm": 0.23636363636363636,
"acc_norm_stderr": 0.033175059300091805
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.25458996328029376,
"mc1_stderr": 0.015250117079156465,
"mc2": 0.41626362709754605,
"mc2_stderr": 0.015226489644958928
},
"harness|ko_commongen_v2|2": {
"acc": 0.3234946871310508,
"acc_stderr": 0.016083627290483675,
"acc_norm": 0.44391971664698937,
"acc_norm_stderr": 0.017081884623542543
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "AIFT/AIFT-instruct-42dot_LLM-SFT-1.3B",
"model_sha": "58801e4a8909a9cda6173c51bd79470297beb4af",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}