{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.3532423208191126,
"acc_stderr": 0.013967822714840055,
"acc_norm": 0.4129692832764505,
"acc_norm_stderr": 0.014388344935398324
},
"harness|ko_hellaswag|10": {
"acc": 0.3986257717586138,
"acc_stderr": 0.004886147907627406,
"acc_norm": 0.5336586337382991,
"acc_norm_stderr": 0.004978462690966918
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.5087719298245614,
"acc_stderr": 0.038342347441649924,
"acc_norm": 0.5087719298245614,
"acc_norm_stderr": 0.038342347441649924
},
"harness|ko_mmlu_management|5": {
"acc": 0.6019417475728155,
"acc_stderr": 0.048467482539772386,
"acc_norm": 0.6019417475728155,
"acc_norm_stderr": 0.048467482539772386
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.545338441890166,
"acc_stderr": 0.0178063045850526,
"acc_norm": 0.545338441890166,
"acc_norm_stderr": 0.0178063045850526
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.45925925925925926,
"acc_stderr": 0.04304979692464244,
"acc_norm": 0.45925925925925926,
"acc_norm_stderr": 0.04304979692464244
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.33,
"acc_stderr": 0.04725815626252604,
"acc_norm": 0.33,
"acc_norm_stderr": 0.04725815626252604
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.425531914893617,
"acc_stderr": 0.032321469162244695,
"acc_norm": 0.425531914893617,
"acc_norm_stderr": 0.032321469162244695
},
"harness|ko_mmlu_virology|5": {
"acc": 0.40963855421686746,
"acc_stderr": 0.03828401115079022,
"acc_norm": 0.40963855421686746,
"acc_norm_stderr": 0.03828401115079022
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.5401929260450161,
"acc_stderr": 0.028306190403305693,
"acc_norm": 0.5401929260450161,
"acc_norm_stderr": 0.028306190403305693
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.47533632286995514,
"acc_stderr": 0.03351695167652628,
"acc_norm": 0.47533632286995514,
"acc_norm_stderr": 0.03351695167652628
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.5267175572519084,
"acc_stderr": 0.04379024936553894,
"acc_norm": 0.5267175572519084,
"acc_norm_stderr": 0.04379024936553894
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.46,
"acc_stderr": 0.05009082659620332,
"acc_norm": 0.46,
"acc_norm_stderr": 0.05009082659620332
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.6363636363636364,
"acc_stderr": 0.03427308652999936,
"acc_norm": 0.6363636363636364,
"acc_norm_stderr": 0.03427308652999936
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.5310344827586206,
"acc_stderr": 0.04158632762097828,
"acc_norm": 0.5310344827586206,
"acc_norm_stderr": 0.04158632762097828
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.3137254901960784,
"acc_stderr": 0.04617034827006717,
"acc_norm": 0.3137254901960784,
"acc_norm_stderr": 0.04617034827006717
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.5294117647058824,
"acc_stderr": 0.03242225027115006,
"acc_norm": 0.5294117647058824,
"acc_norm_stderr": 0.03242225027115006
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.4794871794871795,
"acc_stderr": 0.025329663163489943,
"acc_norm": 0.4794871794871795,
"acc_norm_stderr": 0.025329663163489943
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.58,
"acc_stderr": 0.049604496374885836,
"acc_norm": 0.58,
"acc_norm_stderr": 0.049604496374885836
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.33,
"acc_stderr": 0.047258156262526045,
"acc_norm": 0.33,
"acc_norm_stderr": 0.047258156262526045
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.5092592592592593,
"acc_stderr": 0.04832853553437055,
"acc_norm": 0.5092592592592593,
"acc_norm_stderr": 0.04832853553437055
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.4088669950738916,
"acc_stderr": 0.034590588158832314,
"acc_norm": 0.4088669950738916,
"acc_norm_stderr": 0.034590588158832314
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.5064516129032258,
"acc_stderr": 0.02844163823354051,
"acc_norm": 0.5064516129032258,
"acc_norm_stderr": 0.02844163823354051
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.7350427350427351,
"acc_stderr": 0.028911208802749472,
"acc_norm": 0.7350427350427351,
"acc_norm_stderr": 0.028911208802749472
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.49433962264150944,
"acc_stderr": 0.030770900763851316,
"acc_norm": 0.49433962264150944,
"acc_norm_stderr": 0.030770900763851316
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.509090909090909,
"acc_stderr": 0.04788339768702861,
"acc_norm": 0.509090909090909,
"acc_norm_stderr": 0.04788339768702861
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.32222222222222224,
"acc_stderr": 0.028493465091028597,
"acc_norm": 0.32222222222222224,
"acc_norm_stderr": 0.028493465091028597
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.3576158940397351,
"acc_stderr": 0.03913453431177258,
"acc_norm": 0.3576158940397351,
"acc_norm_stderr": 0.03913453431177258
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.5970149253731343,
"acc_stderr": 0.034683432951111266,
"acc_norm": 0.5970149253731343,
"acc_norm_stderr": 0.034683432951111266
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.45664739884393063,
"acc_stderr": 0.03798106566014498,
"acc_norm": 0.45664739884393063,
"acc_norm_stderr": 0.03798106566014498
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.34656084656084657,
"acc_stderr": 0.024508777521028424,
"acc_norm": 0.34656084656084657,
"acc_norm_stderr": 0.024508777521028424
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.4444444444444444,
"acc_stderr": 0.041553199555931467,
"acc_norm": 0.4444444444444444,
"acc_norm_stderr": 0.041553199555931467
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.3,
"acc_stderr": 0.04605661864718381,
"acc_norm": 0.3,
"acc_norm_stderr": 0.04605661864718381
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.61,
"acc_stderr": 0.04902071300001975,
"acc_norm": 0.61,
"acc_norm_stderr": 0.04902071300001975
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.5028901734104047,
"acc_stderr": 0.02691864538323901,
"acc_norm": 0.5028901734104047,
"acc_norm_stderr": 0.02691864538323901
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.4785276073619632,
"acc_stderr": 0.03924746876751129,
"acc_norm": 0.4785276073619632,
"acc_norm_stderr": 0.03924746876751129
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.5123456790123457,
"acc_stderr": 0.027812262269327242,
"acc_norm": 0.5123456790123457,
"acc_norm_stderr": 0.027812262269327242
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.3,
"acc_stderr": 0.046056618647183814,
"acc_norm": 0.3,
"acc_norm_stderr": 0.046056618647183814
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.5854922279792746,
"acc_stderr": 0.035553003195576686,
"acc_norm": 0.5854922279792746,
"acc_norm_stderr": 0.035553003195576686
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.34210526315789475,
"acc_stderr": 0.044629175353369376,
"acc_norm": 0.34210526315789475,
"acc_norm_stderr": 0.044629175353369376
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.6440366972477064,
"acc_stderr": 0.020528559278244218,
"acc_norm": 0.6440366972477064,
"acc_norm_stderr": 0.020528559278244218
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.2698412698412698,
"acc_stderr": 0.039701582732351734,
"acc_norm": 0.2698412698412698,
"acc_norm_stderr": 0.039701582732351734
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.49673202614379086,
"acc_stderr": 0.02862930519400354,
"acc_norm": 0.49673202614379086,
"acc_norm_stderr": 0.02862930519400354
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.56,
"acc_stderr": 0.04988876515698589,
"acc_norm": 0.56,
"acc_norm_stderr": 0.04988876515698589
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.6446280991735537,
"acc_stderr": 0.0436923632657398,
"acc_norm": 0.6446280991735537,
"acc_norm_stderr": 0.0436923632657398
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.4934210526315789,
"acc_stderr": 0.040685900502249704,
"acc_norm": 0.4934210526315789,
"acc_norm_stderr": 0.040685900502249704
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.4133986928104575,
"acc_stderr": 0.019922115682786685,
"acc_norm": 0.4133986928104575,
"acc_norm_stderr": 0.019922115682786685
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.3617021276595745,
"acc_stderr": 0.028663820147199502,
"acc_norm": 0.3617021276595745,
"acc_norm_stderr": 0.028663820147199502
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.33035714285714285,
"acc_stderr": 0.04464285714285712,
"acc_norm": 0.33035714285714285,
"acc_norm_stderr": 0.04464285714285712
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.3472222222222222,
"acc_stderr": 0.03246887243637649,
"acc_norm": 0.3472222222222222,
"acc_norm_stderr": 0.03246887243637649
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.24022346368715083,
"acc_stderr": 0.014288343803925295,
"acc_norm": 0.24022346368715083,
"acc_norm_stderr": 0.014288343803925295
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.31,
"acc_stderr": 0.04648231987117316,
"acc_norm": 0.31,
"acc_norm_stderr": 0.04648231987117316
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.53,
"acc_stderr": 0.05016135580465919,
"acc_norm": 0.53,
"acc_norm_stderr": 0.05016135580465919
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.4485294117647059,
"acc_stderr": 0.030211479609121603,
"acc_norm": 0.4485294117647059,
"acc_norm_stderr": 0.030211479609121603
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.4530612244897959,
"acc_stderr": 0.03186785930004129,
"acc_norm": 0.4530612244897959,
"acc_norm_stderr": 0.03186785930004129
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.6455696202531646,
"acc_stderr": 0.0311373042971858,
"acc_norm": 0.6455696202531646,
"acc_norm_stderr": 0.0311373042971858
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.3272490221642764,
"acc_stderr": 0.011983819806464747,
"acc_norm": 0.3272490221642764,
"acc_norm_stderr": 0.011983819806464747
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.5392156862745098,
"acc_stderr": 0.03498501649369527,
"acc_norm": 0.5392156862745098,
"acc_norm_stderr": 0.03498501649369527
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.5818181818181818,
"acc_stderr": 0.038517163193983926,
"acc_norm": 0.5818181818181818,
"acc_norm_stderr": 0.038517163193983926
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.2607099143206854,
"mc1_stderr": 0.015368841620766372,
"mc2": 0.4054851425091592,
"mc2_stderr": 0.014739428749798467
},
"harness|ko_commongen_v2|2": {
"acc": 0.538370720188902,
"acc_stderr": 0.017139660221845553,
"acc_norm": 0.5714285714285714,
"acc_norm_stderr": 0.017014038119297498
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "DopeorNope/Yi_lee-v1-6B",
"model_sha": "74357eee5f2ba34e74129c7955b9cf228e68d857",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}