results/DooDooHyun/AIFT-42dot_LLM-PLM-1.3B-ao-instruct-all-v0.55/result_2024-01-22 07:27:06.json
{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.26621160409556316,
            "acc_stderr": 0.012915774781523217,
            "acc_norm": 0.32081911262798635,
            "acc_norm_stderr": 0.013640943091946522
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.35789683330013944,
            "acc_stderr": 0.004784018497679818,
            "acc_norm": 0.46026687910774744,
            "acc_norm_stderr": 0.004974001515580969
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.23976608187134502,
            "acc_stderr": 0.03274485211946956,
            "acc_norm": 0.23976608187134502,
            "acc_norm_stderr": 0.03274485211946956
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.24271844660194175,
            "acc_stderr": 0.04245022486384493,
            "acc_norm": 0.24271844660194175,
            "acc_norm_stderr": 0.04245022486384493
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.24776500638569604,
            "acc_stderr": 0.015438083080568965,
            "acc_norm": 0.24776500638569604,
            "acc_norm_stderr": 0.015438083080568965
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.2074074074074074,
            "acc_stderr": 0.03502553170678318,
            "acc_norm": 0.2074074074074074,
            "acc_norm_stderr": 0.03502553170678318
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.22,
            "acc_stderr": 0.041633319989322695,
            "acc_norm": 0.22,
            "acc_norm_stderr": 0.041633319989322695
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.32340425531914896,
            "acc_stderr": 0.03057944277361034,
            "acc_norm": 0.32340425531914896,
            "acc_norm_stderr": 0.03057944277361034
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.3132530120481928,
            "acc_stderr": 0.036108050180310235,
            "acc_norm": 0.3132530120481928,
            "acc_norm_stderr": 0.036108050180310235
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.24437299035369775,
            "acc_stderr": 0.02440616209466892,
            "acc_norm": 0.24437299035369775,
            "acc_norm_stderr": 0.02440616209466892
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.3542600896860987,
            "acc_stderr": 0.032100621541349864,
            "acc_norm": 0.3542600896860987,
            "acc_norm_stderr": 0.032100621541349864
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.24427480916030533,
            "acc_stderr": 0.037683359597287434,
            "acc_norm": 0.24427480916030533,
            "acc_norm_stderr": 0.037683359597287434
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.28,
            "acc_stderr": 0.04512608598542127,
            "acc_norm": 0.28,
            "acc_norm_stderr": 0.04512608598542127
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.20202020202020202,
            "acc_stderr": 0.02860620428922987,
            "acc_norm": 0.20202020202020202,
            "acc_norm_stderr": 0.02860620428922987
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.23448275862068965,
            "acc_stderr": 0.035306258743465914,
            "acc_norm": 0.23448275862068965,
            "acc_norm_stderr": 0.035306258743465914
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.18627450980392157,
            "acc_stderr": 0.038739587141493524,
            "acc_norm": 0.18627450980392157,
            "acc_norm_stderr": 0.038739587141493524
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.226890756302521,
            "acc_stderr": 0.02720537153827948,
            "acc_norm": 0.226890756302521,
            "acc_norm_stderr": 0.02720537153827948
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.2205128205128205,
            "acc_stderr": 0.021020672680827912,
            "acc_norm": 0.2205128205128205,
            "acc_norm_stderr": 0.021020672680827912
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.3,
            "acc_stderr": 0.046056618647183814,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.046056618647183814
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.32,
            "acc_stderr": 0.046882617226215034,
            "acc_norm": 0.32,
            "acc_norm_stderr": 0.046882617226215034
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.2037037037037037,
            "acc_stderr": 0.03893542518824846,
            "acc_norm": 0.2037037037037037,
            "acc_norm_stderr": 0.03893542518824846
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.2561576354679803,
            "acc_stderr": 0.030712730070982592,
            "acc_norm": 0.2561576354679803,
            "acc_norm_stderr": 0.030712730070982592
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.24193548387096775,
            "acc_stderr": 0.024362599693031086,
            "acc_norm": 0.24193548387096775,
            "acc_norm_stderr": 0.024362599693031086
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.2863247863247863,
            "acc_stderr": 0.029614323690456645,
            "acc_norm": 0.2863247863247863,
            "acc_norm_stderr": 0.029614323690456645
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.20754716981132076,
            "acc_stderr": 0.024959918028911274,
            "acc_norm": 0.20754716981132076,
            "acc_norm_stderr": 0.024959918028911274
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.23636363636363636,
            "acc_stderr": 0.04069306319721376,
            "acc_norm": 0.23636363636363636,
            "acc_norm_stderr": 0.04069306319721376
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.22592592592592592,
            "acc_stderr": 0.025497532639609546,
            "acc_norm": 0.22592592592592592,
            "acc_norm_stderr": 0.025497532639609546
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.271523178807947,
            "acc_stderr": 0.03631329803969654,
            "acc_norm": 0.271523178807947,
            "acc_norm_stderr": 0.03631329803969654
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.23383084577114427,
            "acc_stderr": 0.029929415408348387,
            "acc_norm": 0.23383084577114427,
            "acc_norm_stderr": 0.029929415408348387
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.2254335260115607,
            "acc_stderr": 0.031862098516411426,
            "acc_norm": 0.2254335260115607,
            "acc_norm_stderr": 0.031862098516411426
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.2328042328042328,
            "acc_stderr": 0.02176596167215454,
            "acc_norm": 0.2328042328042328,
            "acc_norm_stderr": 0.02176596167215454
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.2708333333333333,
            "acc_stderr": 0.03716177437566017,
            "acc_norm": 0.2708333333333333,
            "acc_norm_stderr": 0.03716177437566017
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.25,
            "acc_stderr": 0.04351941398892446,
            "acc_norm": 0.25,
            "acc_norm_stderr": 0.04351941398892446
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.27,
            "acc_stderr": 0.044619604333847394,
            "acc_norm": 0.27,
            "acc_norm_stderr": 0.044619604333847394
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.23410404624277456,
            "acc_stderr": 0.022797110278071138,
            "acc_norm": 0.23410404624277456,
            "acc_norm_stderr": 0.022797110278071138
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.24539877300613497,
            "acc_stderr": 0.03380939813943354,
            "acc_norm": 0.24539877300613497,
            "acc_norm_stderr": 0.03380939813943354
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.2839506172839506,
            "acc_stderr": 0.025089478523765134,
            "acc_norm": 0.2839506172839506,
            "acc_norm_stderr": 0.025089478523765134
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.26,
            "acc_stderr": 0.04408440022768078,
            "acc_norm": 0.26,
            "acc_norm_stderr": 0.04408440022768078
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.20725388601036268,
            "acc_stderr": 0.029252823291803627,
            "acc_norm": 0.20725388601036268,
            "acc_norm_stderr": 0.029252823291803627
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.2807017543859649,
            "acc_stderr": 0.04227054451232199,
            "acc_norm": 0.2807017543859649,
            "acc_norm_stderr": 0.04227054451232199
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.20733944954128442,
            "acc_stderr": 0.017381415563608674,
            "acc_norm": 0.20733944954128442,
            "acc_norm_stderr": 0.017381415563608674
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.21428571428571427,
            "acc_stderr": 0.03670066451047182,
            "acc_norm": 0.21428571428571427,
            "acc_norm_stderr": 0.03670066451047182
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.23202614379084968,
            "acc_stderr": 0.024170840879341005,
            "acc_norm": 0.23202614379084968,
            "acc_norm_stderr": 0.024170840879341005
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.24,
            "acc_stderr": 0.04292346959909284,
            "acc_norm": 0.24,
            "acc_norm_stderr": 0.04292346959909284
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.3305785123966942,
            "acc_stderr": 0.04294340845212094,
            "acc_norm": 0.3305785123966942,
            "acc_norm_stderr": 0.04294340845212094
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.17763157894736842,
            "acc_stderr": 0.03110318238312337,
            "acc_norm": 0.17763157894736842,
            "acc_norm_stderr": 0.03110318238312337
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.2647058823529412,
            "acc_stderr": 0.017848089574913226,
            "acc_norm": 0.2647058823529412,
            "acc_norm_stderr": 0.017848089574913226
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.2553191489361702,
            "acc_stderr": 0.02601199293090201,
            "acc_norm": 0.2553191489361702,
            "acc_norm_stderr": 0.02601199293090201
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.3482142857142857,
            "acc_stderr": 0.04521829902833585,
            "acc_norm": 0.3482142857142857,
            "acc_norm_stderr": 0.04521829902833585
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.23148148148148148,
            "acc_stderr": 0.02876511171804693,
            "acc_norm": 0.23148148148148148,
            "acc_norm_stderr": 0.02876511171804693
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.27150837988826815,
            "acc_stderr": 0.014874252168095277,
            "acc_norm": 0.27150837988826815,
            "acc_norm_stderr": 0.014874252168095277
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.23,
            "acc_stderr": 0.042295258468165065,
            "acc_norm": 0.23,
            "acc_norm_stderr": 0.042295258468165065
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.31,
            "acc_stderr": 0.04648231987117316,
            "acc_norm": 0.31,
            "acc_norm_stderr": 0.04648231987117316
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.34191176470588236,
            "acc_stderr": 0.028814722422254174,
            "acc_norm": 0.34191176470588236,
            "acc_norm_stderr": 0.028814722422254174
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.19591836734693877,
            "acc_stderr": 0.025409301953225678,
            "acc_norm": 0.19591836734693877,
            "acc_norm_stderr": 0.025409301953225678
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.24472573839662448,
            "acc_stderr": 0.02798569938703642,
            "acc_norm": 0.24472573839662448,
            "acc_norm_stderr": 0.02798569938703642
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.2457627118644068,
            "acc_stderr": 0.010996156635142692,
            "acc_norm": 0.2457627118644068,
            "acc_norm_stderr": 0.010996156635142692
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.3137254901960784,
            "acc_stderr": 0.03256685484460389,
            "acc_norm": 0.3137254901960784,
            "acc_norm_stderr": 0.03256685484460389
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.19393939393939394,
            "acc_stderr": 0.0308741451365621,
            "acc_norm": 0.19393939393939394,
            "acc_norm_stderr": 0.0308741451365621
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.2729498164014688,
            "mc1_stderr": 0.015594753632006518,
            "mc2": 0.4163135604722655,
            "mc2_stderr": 0.015044751734204925
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.2597402597402597,
            "acc_stderr": 0.015075666411230305,
            "acc_norm": 0.3707201889020071,
            "acc_norm_stderr": 0.016605801289212595
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "DooDooHyun/AIFT-42dot_LLM-PLM-1.3B-ao-instruct-all-v0.55",
        "model_sha": "511690a94f6192d8b56dc822c6278000d32af054",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}
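For reference, a minimal sketch of how a result file in this schema might be consumed with Python. The local filename "result.json" and the simple unweighted averaging over the ko_mmlu subtasks are illustrative assumptions, not part of the source file or necessarily the official leaderboard aggregation.

import json

# Load the harness result file (local path is an assumption for illustration).
with open("result.json", encoding="utf-8") as f:
    data = json.load(f)

results = data["results"]

# Unweighted mean of acc_norm over all ko_mmlu subtasks (illustrative aggregation).
mmlu_scores = [v["acc_norm"] for k, v in results.items() if "ko_mmlu" in k]
print("ko_mmlu mean acc_norm:", sum(mmlu_scores) / len(mmlu_scores))

# Headline metrics reported per task in this file.
print("ko_arc_challenge acc_norm:", results["harness|ko_arc_challenge|25"]["acc_norm"])
print("ko_hellaswag acc_norm:", results["harness|ko_hellaswag|10"]["acc_norm"])
print("ko_truthfulqa mc2:", results["harness|ko_truthfulqa_mc|0"]["mc2"])
print("ko_commongen_v2 acc_norm:", results["harness|ko_commongen_v2|2"]["acc_norm"])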