results/Herry443/Mistral-7B-KNUT-ref-ALL/result_2024-02-05 06:09:30.json
{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.19027303754266212,
"acc_stderr": 0.011470424179225702,
"acc_norm": 0.23976109215017063,
"acc_norm_stderr": 0.012476304127453956
},
"harness|ko_hellaswag|10": {
"acc": 0.2848038239394543,
"acc_stderr": 0.004503985839041984,
"acc_norm": 0.31607249551882094,
"acc_norm_stderr": 0.0046399137096159344
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.1286549707602339,
"acc_stderr": 0.02567934272327694,
"acc_norm": 0.1286549707602339,
"acc_norm_stderr": 0.02567934272327694
},
"harness|ko_mmlu_management|5": {
"acc": 0.3592233009708738,
"acc_stderr": 0.04750458399041694,
"acc_norm": 0.3592233009708738,
"acc_norm_stderr": 0.04750458399041694
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.2720306513409962,
"acc_stderr": 0.015913367447500524,
"acc_norm": 0.2720306513409962,
"acc_norm_stderr": 0.015913367447500524
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.22962962962962963,
"acc_stderr": 0.036333844140734636,
"acc_norm": 0.22962962962962963,
"acc_norm_stderr": 0.036333844140734636
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.12,
"acc_stderr": 0.03265986323710905,
"acc_norm": 0.12,
"acc_norm_stderr": 0.03265986323710905
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.225531914893617,
"acc_stderr": 0.02732107841738753,
"acc_norm": 0.225531914893617,
"acc_norm_stderr": 0.02732107841738753
},
"harness|ko_mmlu_virology|5": {
"acc": 0.28313253012048195,
"acc_stderr": 0.03507295431370518,
"acc_norm": 0.28313253012048195,
"acc_norm_stderr": 0.03507295431370518
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.3054662379421222,
"acc_stderr": 0.026160584450140474,
"acc_norm": 0.3054662379421222,
"acc_norm_stderr": 0.026160584450140474
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.2600896860986547,
"acc_stderr": 0.029442495585857476,
"acc_norm": 0.2600896860986547,
"acc_norm_stderr": 0.029442495585857476
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.2366412213740458,
"acc_stderr": 0.037276735755969195,
"acc_norm": 0.2366412213740458,
"acc_norm_stderr": 0.037276735755969195
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.32,
"acc_stderr": 0.04688261722621505,
"acc_norm": 0.32,
"acc_norm_stderr": 0.04688261722621505
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.18181818181818182,
"acc_stderr": 0.0274796030105388,
"acc_norm": 0.18181818181818182,
"acc_norm_stderr": 0.0274796030105388
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.19310344827586207,
"acc_stderr": 0.03289445522127402,
"acc_norm": 0.19310344827586207,
"acc_norm_stderr": 0.03289445522127402
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.3627450980392157,
"acc_stderr": 0.04784060704105653,
"acc_norm": 0.3627450980392157,
"acc_norm_stderr": 0.04784060704105653
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.3487394957983193,
"acc_stderr": 0.030956636328566548,
"acc_norm": 0.3487394957983193,
"acc_norm_stderr": 0.030956636328566548
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.37435897435897436,
"acc_stderr": 0.024537591572830517,
"acc_norm": 0.37435897435897436,
"acc_norm_stderr": 0.024537591572830517
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.18,
"acc_stderr": 0.03861229196653694,
"acc_norm": 0.18,
"acc_norm_stderr": 0.03861229196653694
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.33,
"acc_stderr": 0.047258156262526045,
"acc_norm": 0.33,
"acc_norm_stderr": 0.047258156262526045
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.25925925925925924,
"acc_stderr": 0.042365112580946315,
"acc_norm": 0.25925925925925924,
"acc_norm_stderr": 0.042365112580946315
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.2955665024630542,
"acc_stderr": 0.032104944337514575,
"acc_norm": 0.2955665024630542,
"acc_norm_stderr": 0.032104944337514575
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.3193548387096774,
"acc_stderr": 0.026522709674667768,
"acc_norm": 0.3193548387096774,
"acc_norm_stderr": 0.026522709674667768
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.19658119658119658,
"acc_stderr": 0.02603538609895129,
"acc_norm": 0.19658119658119658,
"acc_norm_stderr": 0.02603538609895129
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.25660377358490566,
"acc_stderr": 0.02688064788905199,
"acc_norm": 0.25660377358490566,
"acc_norm_stderr": 0.02688064788905199
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.19090909090909092,
"acc_stderr": 0.03764425585984925,
"acc_norm": 0.19090909090909092,
"acc_norm_stderr": 0.03764425585984925
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.2962962962962963,
"acc_stderr": 0.02784081149587192,
"acc_norm": 0.2962962962962963,
"acc_norm_stderr": 0.02784081149587192
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.33112582781456956,
"acc_stderr": 0.038425817186598696,
"acc_norm": 0.33112582781456956,
"acc_norm_stderr": 0.038425817186598696
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.23880597014925373,
"acc_stderr": 0.030147775935409224,
"acc_norm": 0.23880597014925373,
"acc_norm_stderr": 0.030147775935409224
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.2947976878612717,
"acc_stderr": 0.034765996075164785,
"acc_norm": 0.2947976878612717,
"acc_norm_stderr": 0.034765996075164785
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.24603174603174602,
"acc_stderr": 0.022182037202948368,
"acc_norm": 0.24603174603174602,
"acc_norm_stderr": 0.022182037202948368
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.2708333333333333,
"acc_stderr": 0.03716177437566017,
"acc_norm": 0.2708333333333333,
"acc_norm_stderr": 0.03716177437566017
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.42,
"acc_stderr": 0.049604496374885836,
"acc_norm": 0.42,
"acc_norm_stderr": 0.049604496374885836
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.39,
"acc_stderr": 0.04902071300001975,
"acc_norm": 0.39,
"acc_norm_stderr": 0.04902071300001975
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.24277456647398843,
"acc_stderr": 0.0230836585869842,
"acc_norm": 0.24277456647398843,
"acc_norm_stderr": 0.0230836585869842
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.26380368098159507,
"acc_stderr": 0.034624199316156234,
"acc_norm": 0.26380368098159507,
"acc_norm_stderr": 0.034624199316156234
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.2191358024691358,
"acc_stderr": 0.023016705640262185,
"acc_norm": 0.2191358024691358,
"acc_norm_stderr": 0.023016705640262185
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.31,
"acc_stderr": 0.04648231987117316,
"acc_norm": 0.31,
"acc_norm_stderr": 0.04648231987117316
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.36787564766839376,
"acc_stderr": 0.034801756684660366,
"acc_norm": 0.36787564766839376,
"acc_norm_stderr": 0.034801756684660366
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.23684210526315788,
"acc_stderr": 0.03999423879281336,
"acc_norm": 0.23684210526315788,
"acc_norm_stderr": 0.03999423879281336
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.1963302752293578,
"acc_stderr": 0.017030719339154368,
"acc_norm": 0.1963302752293578,
"acc_norm_stderr": 0.017030719339154368
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.30952380952380953,
"acc_stderr": 0.04134913018303316,
"acc_norm": 0.30952380952380953,
"acc_norm_stderr": 0.04134913018303316
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.2549019607843137,
"acc_stderr": 0.02495418432487991,
"acc_norm": 0.2549019607843137,
"acc_norm_stderr": 0.02495418432487991
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.27,
"acc_stderr": 0.04461960433384739,
"acc_norm": 0.27,
"acc_norm_stderr": 0.04461960433384739
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.23140495867768596,
"acc_stderr": 0.03849856098794088,
"acc_norm": 0.23140495867768596,
"acc_norm_stderr": 0.03849856098794088
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.17763157894736842,
"acc_stderr": 0.03110318238312338,
"acc_norm": 0.17763157894736842,
"acc_norm_stderr": 0.03110318238312338
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.238562091503268,
"acc_stderr": 0.017242385828779606,
"acc_norm": 0.238562091503268,
"acc_norm_stderr": 0.017242385828779606
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.2765957446808511,
"acc_stderr": 0.026684564340460987,
"acc_norm": 0.2765957446808511,
"acc_norm_stderr": 0.026684564340460987
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.16964285714285715,
"acc_stderr": 0.03562367850095391,
"acc_norm": 0.16964285714285715,
"acc_norm_stderr": 0.03562367850095391
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.4305555555555556,
"acc_stderr": 0.03376922151252335,
"acc_norm": 0.4305555555555556,
"acc_norm_stderr": 0.03376922151252335
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.23798882681564246,
"acc_stderr": 0.014242630070574892,
"acc_norm": 0.23798882681564246,
"acc_norm_stderr": 0.014242630070574892
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.25,
"acc_stderr": 0.04351941398892446,
"acc_norm": 0.25,
"acc_norm_stderr": 0.04351941398892446
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.3,
"acc_stderr": 0.046056618647183814,
"acc_norm": 0.3,
"acc_norm_stderr": 0.046056618647183814
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.4485294117647059,
"acc_stderr": 0.030211479609121593,
"acc_norm": 0.4485294117647059,
"acc_norm_stderr": 0.030211479609121593
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.3836734693877551,
"acc_stderr": 0.031130880396235922,
"acc_norm": 0.3836734693877551,
"acc_norm_stderr": 0.031130880396235922
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.2742616033755274,
"acc_stderr": 0.029041333510598018,
"acc_norm": 0.2742616033755274,
"acc_norm_stderr": 0.029041333510598018
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.2503259452411995,
"acc_stderr": 0.011064151027165438,
"acc_norm": 0.2503259452411995,
"acc_norm_stderr": 0.011064151027165438
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.3088235294117647,
"acc_stderr": 0.03242661719827218,
"acc_norm": 0.3088235294117647,
"acc_norm_stderr": 0.03242661719827218
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.20606060606060606,
"acc_stderr": 0.03158415324047709,
"acc_norm": 0.20606060606060606,
"acc_norm_stderr": 0.03158415324047709
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.26193390452876375,
"mc1_stderr": 0.01539211880501501,
"mc2": 0.4275229431547429,
"mc2_stderr": 0.015942592796773743
},
"harness|ko_commongen_v2|2": {
"acc": 0.19008264462809918,
"acc_stderr": 0.013489827742736773,
"acc_norm": 0.2502951593860685,
"acc_norm_stderr": 0.014893137573316869
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "Herry443/Mistral-7B-KNUT-ref-ALL",
"model_sha": "95f28cdf865867be553670e9665149f0ca0f78c9",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}