{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.3165529010238908,
"acc_stderr": 0.013592431519068084,
"acc_norm": 0.3728668941979522,
"acc_norm_stderr": 0.014131176760131165
},
"harness|ko_hellaswag|10": {
"acc": 0.37532364070902213,
"acc_stderr": 0.004832167854501651,
"acc_norm": 0.48994224258115915,
"acc_norm_stderr": 0.004988771791854509
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.25146198830409355,
"acc_stderr": 0.033275044238468436,
"acc_norm": 0.25146198830409355,
"acc_norm_stderr": 0.033275044238468436
},
"harness|ko_mmlu_management|5": {
"acc": 0.2524271844660194,
"acc_stderr": 0.04301250399690878,
"acc_norm": 0.2524271844660194,
"acc_norm_stderr": 0.04301250399690878
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.3333333333333333,
"acc_stderr": 0.01685739124747255,
"acc_norm": 0.3333333333333333,
"acc_norm_stderr": 0.01685739124747255
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.2962962962962963,
"acc_stderr": 0.03944624162501116,
"acc_norm": 0.2962962962962963,
"acc_norm_stderr": 0.03944624162501116
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.26,
"acc_stderr": 0.04408440022768077,
"acc_norm": 0.26,
"acc_norm_stderr": 0.04408440022768077
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.31063829787234043,
"acc_stderr": 0.03025123757921317,
"acc_norm": 0.31063829787234043,
"acc_norm_stderr": 0.03025123757921317
},
"harness|ko_mmlu_virology|5": {
"acc": 0.2891566265060241,
"acc_stderr": 0.03529486801511115,
"acc_norm": 0.2891566265060241,
"acc_norm_stderr": 0.03529486801511115
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.3729903536977492,
"acc_stderr": 0.02746661021314012,
"acc_norm": 0.3729903536977492,
"acc_norm_stderr": 0.02746661021314012
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.273542600896861,
"acc_stderr": 0.029918586707798834,
"acc_norm": 0.273542600896861,
"acc_norm_stderr": 0.029918586707798834
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.3435114503816794,
"acc_stderr": 0.041649760719448786,
"acc_norm": 0.3435114503816794,
"acc_norm_stderr": 0.041649760719448786
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.27,
"acc_stderr": 0.044619604333847394,
"acc_norm": 0.27,
"acc_norm_stderr": 0.044619604333847394
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.3484848484848485,
"acc_stderr": 0.033948539651564025,
"acc_norm": 0.3484848484848485,
"acc_norm_stderr": 0.033948539651564025
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.2896551724137931,
"acc_stderr": 0.03780019230438014,
"acc_norm": 0.2896551724137931,
"acc_norm_stderr": 0.03780019230438014
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.13725490196078433,
"acc_stderr": 0.0342408466989152,
"acc_norm": 0.13725490196078433,
"acc_norm_stderr": 0.0342408466989152
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.24789915966386555,
"acc_stderr": 0.028047967224176892,
"acc_norm": 0.24789915966386555,
"acc_norm_stderr": 0.028047967224176892
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.21794871794871795,
"acc_stderr": 0.020932445774463182,
"acc_norm": 0.21794871794871795,
"acc_norm_stderr": 0.020932445774463182
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.31,
"acc_stderr": 0.04648231987117316,
"acc_norm": 0.31,
"acc_norm_stderr": 0.04648231987117316
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.35,
"acc_stderr": 0.0479372485441102,
"acc_norm": 0.35,
"acc_norm_stderr": 0.0479372485441102
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.25,
"acc_stderr": 0.04186091791394607,
"acc_norm": 0.25,
"acc_norm_stderr": 0.04186091791394607
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.3103448275862069,
"acc_stderr": 0.03255086769970103,
"acc_norm": 0.3103448275862069,
"acc_norm_stderr": 0.03255086769970103
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.2967741935483871,
"acc_stderr": 0.02598850079241188,
"acc_norm": 0.2967741935483871,
"acc_norm_stderr": 0.02598850079241188
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.3247863247863248,
"acc_stderr": 0.03067902276549883,
"acc_norm": 0.3247863247863248,
"acc_norm_stderr": 0.03067902276549883
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.35471698113207545,
"acc_stderr": 0.029445175328199593,
"acc_norm": 0.35471698113207545,
"acc_norm_stderr": 0.029445175328199593
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.33636363636363636,
"acc_stderr": 0.04525393596302505,
"acc_norm": 0.33636363636363636,
"acc_norm_stderr": 0.04525393596302505
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.2777777777777778,
"acc_stderr": 0.02730914058823018,
"acc_norm": 0.2777777777777778,
"acc_norm_stderr": 0.02730914058823018
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.2847682119205298,
"acc_stderr": 0.03684881521389024,
"acc_norm": 0.2847682119205298,
"acc_norm_stderr": 0.03684881521389024
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.25870646766169153,
"acc_stderr": 0.030965903123573033,
"acc_norm": 0.25870646766169153,
"acc_norm_stderr": 0.030965903123573033
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.24855491329479767,
"acc_stderr": 0.03295304696818318,
"acc_norm": 0.24855491329479767,
"acc_norm_stderr": 0.03295304696818318
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.25132275132275134,
"acc_stderr": 0.022340482339643898,
"acc_norm": 0.25132275132275134,
"acc_norm_stderr": 0.022340482339643898
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.1875,
"acc_stderr": 0.032639560491693344,
"acc_norm": 0.1875,
"acc_norm_stderr": 0.032639560491693344
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.22,
"acc_stderr": 0.0416333199893227,
"acc_norm": 0.22,
"acc_norm_stderr": 0.0416333199893227
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.36,
"acc_stderr": 0.04824181513244218,
"acc_norm": 0.36,
"acc_norm_stderr": 0.04824181513244218
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.28901734104046245,
"acc_stderr": 0.02440517393578324,
"acc_norm": 0.28901734104046245,
"acc_norm_stderr": 0.02440517393578324
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.26380368098159507,
"acc_stderr": 0.034624199316156234,
"acc_norm": 0.26380368098159507,
"acc_norm_stderr": 0.034624199316156234
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.31790123456790126,
"acc_stderr": 0.025910063528240865,
"acc_norm": 0.31790123456790126,
"acc_norm_stderr": 0.025910063528240865
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.26,
"acc_stderr": 0.04408440022768078,
"acc_norm": 0.26,
"acc_norm_stderr": 0.04408440022768078
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.2694300518134715,
"acc_stderr": 0.03201867122877793,
"acc_norm": 0.2694300518134715,
"acc_norm_stderr": 0.03201867122877793
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.2807017543859649,
"acc_stderr": 0.04227054451232199,
"acc_norm": 0.2807017543859649,
"acc_norm_stderr": 0.04227054451232199
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.3100917431192661,
"acc_stderr": 0.01983084968443975,
"acc_norm": 0.3100917431192661,
"acc_norm_stderr": 0.01983084968443975
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.1984126984126984,
"acc_stderr": 0.03567016675276863,
"acc_norm": 0.1984126984126984,
"acc_norm_stderr": 0.03567016675276863
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.32679738562091504,
"acc_stderr": 0.026857294663281413,
"acc_norm": 0.32679738562091504,
"acc_norm_stderr": 0.026857294663281413
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.26,
"acc_stderr": 0.0440844002276808,
"acc_norm": 0.26,
"acc_norm_stderr": 0.0440844002276808
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.4214876033057851,
"acc_stderr": 0.045077322787750944,
"acc_norm": 0.4214876033057851,
"acc_norm_stderr": 0.045077322787750944
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.28289473684210525,
"acc_stderr": 0.03665349695640767,
"acc_norm": 0.28289473684210525,
"acc_norm_stderr": 0.03665349695640767
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.31209150326797386,
"acc_stderr": 0.01874501120127766,
"acc_norm": 0.31209150326797386,
"acc_norm_stderr": 0.01874501120127766
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.23404255319148937,
"acc_stderr": 0.025257861359432403,
"acc_norm": 0.23404255319148937,
"acc_norm_stderr": 0.025257861359432403
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.33035714285714285,
"acc_stderr": 0.04464285714285712,
"acc_norm": 0.33035714285714285,
"acc_norm_stderr": 0.04464285714285712
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.26851851851851855,
"acc_stderr": 0.030225226160012376,
"acc_norm": 0.26851851851851855,
"acc_norm_stderr": 0.030225226160012376
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.2424581005586592,
"acc_stderr": 0.01433352205921789,
"acc_norm": 0.2424581005586592,
"acc_norm_stderr": 0.01433352205921789
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.25,
"acc_stderr": 0.04351941398892446,
"acc_norm": 0.25,
"acc_norm_stderr": 0.04351941398892446
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.24,
"acc_stderr": 0.042923469599092816,
"acc_norm": 0.24,
"acc_norm_stderr": 0.042923469599092816
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.35661764705882354,
"acc_stderr": 0.02909720956841195,
"acc_norm": 0.35661764705882354,
"acc_norm_stderr": 0.02909720956841195
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.24489795918367346,
"acc_stderr": 0.027529637440174934,
"acc_norm": 0.24489795918367346,
"acc_norm_stderr": 0.027529637440174934
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.29535864978902954,
"acc_stderr": 0.029696338713422882,
"acc_norm": 0.29535864978902954,
"acc_norm_stderr": 0.029696338713422882
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.2653194263363755,
"acc_stderr": 0.011276198843958876,
"acc_norm": 0.2653194263363755,
"acc_norm_stderr": 0.011276198843958876
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.25,
"acc_stderr": 0.03039153369274154,
"acc_norm": 0.25,
"acc_norm_stderr": 0.03039153369274154
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.28484848484848485,
"acc_stderr": 0.03524390844511785,
"acc_norm": 0.28484848484848485,
"acc_norm_stderr": 0.03524390844511785
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.25458996328029376,
"mc1_stderr": 0.015250117079156475,
"mc2": 0.3966274374680779,
"mc2_stderr": 0.014846518193358589
},
"harness|ko_commongen_v2|2": {
"acc": 0.3695395513577332,
"acc_stderr": 0.01659488340568542,
"acc_norm": 0.51357733175915,
"acc_norm_stderr": 0.01718401506040145
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "The-matt/llama2_ko-7b_sandy-fire-170_1530",
"model_sha": "b963fcf8d7249c3f360ccfa5db70c0b20bddeb08",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}