results/eclipsemint/kollama2-7b-v0/result_2023-10-29 09:14:47.json
{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.26791808873720135,
"acc_stderr": 0.012942030195136421,
"acc_norm": 0.310580204778157,
"acc_norm_stderr": 0.013522292098053057
},
"harness|ko_hellaswag|10": {
"acc": 0.33389762995419237,
"acc_stderr": 0.004706398252382464,
"acc_norm": 0.4122684724158534,
"acc_norm_stderr": 0.004912370023913011
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.4269005847953216,
"acc_stderr": 0.03793620616529916,
"acc_norm": 0.4269005847953216,
"acc_norm_stderr": 0.03793620616529916
},
"harness|ko_mmlu_management|5": {
"acc": 0.2912621359223301,
"acc_stderr": 0.04498676320572922,
"acc_norm": 0.2912621359223301,
"acc_norm_stderr": 0.04498676320572922
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.34738186462324394,
"acc_stderr": 0.01702667174865573,
"acc_norm": 0.34738186462324394,
"acc_norm_stderr": 0.01702667174865573
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.2740740740740741,
"acc_stderr": 0.03853254836552003,
"acc_norm": 0.2740740740740741,
"acc_norm_stderr": 0.03853254836552003
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.28,
"acc_stderr": 0.045126085985421276,
"acc_norm": 0.28,
"acc_norm_stderr": 0.045126085985421276
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.35319148936170214,
"acc_stderr": 0.031245325202761923,
"acc_norm": 0.35319148936170214,
"acc_norm_stderr": 0.031245325202761923
},
"harness|ko_mmlu_virology|5": {
"acc": 0.2891566265060241,
"acc_stderr": 0.03529486801511115,
"acc_norm": 0.2891566265060241,
"acc_norm_stderr": 0.03529486801511115
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.3440514469453376,
"acc_stderr": 0.026981478043648022,
"acc_norm": 0.3440514469453376,
"acc_norm_stderr": 0.026981478043648022
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.3721973094170404,
"acc_stderr": 0.032443052830087304,
"acc_norm": 0.3721973094170404,
"acc_norm_stderr": 0.032443052830087304
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.2748091603053435,
"acc_stderr": 0.03915345408847837,
"acc_norm": 0.2748091603053435,
"acc_norm_stderr": 0.03915345408847837
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.3,
"acc_stderr": 0.046056618647183814,
"acc_norm": 0.3,
"acc_norm_stderr": 0.046056618647183814
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.2676767676767677,
"acc_stderr": 0.031544498882702866,
"acc_norm": 0.2676767676767677,
"acc_norm_stderr": 0.031544498882702866
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.2896551724137931,
"acc_stderr": 0.037800192304380135,
"acc_norm": 0.2896551724137931,
"acc_norm_stderr": 0.037800192304380135
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.23529411764705882,
"acc_stderr": 0.04220773659171453,
"acc_norm": 0.23529411764705882,
"acc_norm_stderr": 0.04220773659171453
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.2773109243697479,
"acc_stderr": 0.029079374539480007,
"acc_norm": 0.2773109243697479,
"acc_norm_stderr": 0.029079374539480007
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.24871794871794872,
"acc_stderr": 0.0219169577092138,
"acc_norm": 0.24871794871794872,
"acc_norm_stderr": 0.0219169577092138
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.39,
"acc_stderr": 0.04902071300001974,
"acc_norm": 0.39,
"acc_norm_stderr": 0.04902071300001974
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.23,
"acc_stderr": 0.04229525846816505,
"acc_norm": 0.23,
"acc_norm_stderr": 0.04229525846816505
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.3333333333333333,
"acc_stderr": 0.04557239513497752,
"acc_norm": 0.3333333333333333,
"acc_norm_stderr": 0.04557239513497752
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.2413793103448276,
"acc_stderr": 0.030108330718011625,
"acc_norm": 0.2413793103448276,
"acc_norm_stderr": 0.030108330718011625
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.3032258064516129,
"acc_stderr": 0.026148685930671746,
"acc_norm": 0.3032258064516129,
"acc_norm_stderr": 0.026148685930671746
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.41025641025641024,
"acc_stderr": 0.03222414045241107,
"acc_norm": 0.41025641025641024,
"acc_norm_stderr": 0.03222414045241107
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.27547169811320754,
"acc_stderr": 0.027495663683724057,
"acc_norm": 0.27547169811320754,
"acc_norm_stderr": 0.027495663683724057
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.38181818181818183,
"acc_stderr": 0.04653429807913508,
"acc_norm": 0.38181818181818183,
"acc_norm_stderr": 0.04653429807913508
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.22962962962962963,
"acc_stderr": 0.025644108639267624,
"acc_norm": 0.22962962962962963,
"acc_norm_stderr": 0.025644108639267624
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.2119205298013245,
"acc_stderr": 0.03336767086567977,
"acc_norm": 0.2119205298013245,
"acc_norm_stderr": 0.03336767086567977
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.3383084577114428,
"acc_stderr": 0.03345563070339192,
"acc_norm": 0.3383084577114428,
"acc_norm_stderr": 0.03345563070339192
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.2543352601156069,
"acc_stderr": 0.0332055644308557,
"acc_norm": 0.2543352601156069,
"acc_norm_stderr": 0.0332055644308557
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.21693121693121692,
"acc_stderr": 0.021227082449445045,
"acc_norm": 0.21693121693121692,
"acc_norm_stderr": 0.021227082449445045
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.2361111111111111,
"acc_stderr": 0.03551446610810826,
"acc_norm": 0.2361111111111111,
"acc_norm_stderr": 0.03551446610810826
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.22,
"acc_stderr": 0.041633319989322695,
"acc_norm": 0.22,
"acc_norm_stderr": 0.041633319989322695
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.35,
"acc_stderr": 0.0479372485441102,
"acc_norm": 0.35,
"acc_norm_stderr": 0.0479372485441102
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.2745664739884393,
"acc_stderr": 0.02402774515526502,
"acc_norm": 0.2745664739884393,
"acc_norm_stderr": 0.02402774515526502
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.26993865030674846,
"acc_stderr": 0.03487825168497892,
"acc_norm": 0.26993865030674846,
"acc_norm_stderr": 0.03487825168497892
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.3117283950617284,
"acc_stderr": 0.02577311116963045,
"acc_norm": 0.3117283950617284,
"acc_norm_stderr": 0.02577311116963045
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.25,
"acc_stderr": 0.04351941398892446,
"acc_norm": 0.25,
"acc_norm_stderr": 0.04351941398892446
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.24352331606217617,
"acc_stderr": 0.030975436386845436,
"acc_norm": 0.24352331606217617,
"acc_norm_stderr": 0.030975436386845436
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.2543859649122807,
"acc_stderr": 0.040969851398436716,
"acc_norm": 0.2543859649122807,
"acc_norm_stderr": 0.040969851398436716
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.24403669724770644,
"acc_stderr": 0.018415286351416416,
"acc_norm": 0.24403669724770644,
"acc_norm_stderr": 0.018415286351416416
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.20634920634920634,
"acc_stderr": 0.036196045241242494,
"acc_norm": 0.20634920634920634,
"acc_norm_stderr": 0.036196045241242494
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.28431372549019607,
"acc_stderr": 0.025829163272757475,
"acc_norm": 0.28431372549019607,
"acc_norm_stderr": 0.025829163272757475
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.38,
"acc_stderr": 0.04878317312145632,
"acc_norm": 0.38,
"acc_norm_stderr": 0.04878317312145632
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.4462809917355372,
"acc_stderr": 0.04537935177947879,
"acc_norm": 0.4462809917355372,
"acc_norm_stderr": 0.04537935177947879
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.17763157894736842,
"acc_stderr": 0.03110318238312338,
"acc_norm": 0.17763157894736842,
"acc_norm_stderr": 0.03110318238312338
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.2696078431372549,
"acc_stderr": 0.017952449196987862,
"acc_norm": 0.2696078431372549,
"acc_norm_stderr": 0.017952449196987862
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.25177304964539005,
"acc_stderr": 0.025892151156709405,
"acc_norm": 0.25177304964539005,
"acc_norm_stderr": 0.025892151156709405
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.30357142857142855,
"acc_stderr": 0.04364226155841044,
"acc_norm": 0.30357142857142855,
"acc_norm_stderr": 0.04364226155841044
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.24537037037037038,
"acc_stderr": 0.029346665094372937,
"acc_norm": 0.24537037037037038,
"acc_norm_stderr": 0.029346665094372937
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.2424581005586592,
"acc_stderr": 0.01433352205921789,
"acc_norm": 0.2424581005586592,
"acc_norm_stderr": 0.01433352205921789
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.32,
"acc_stderr": 0.04688261722621504,
"acc_norm": 0.32,
"acc_norm_stderr": 0.04688261722621504
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.28,
"acc_stderr": 0.04512608598542127,
"acc_norm": 0.28,
"acc_norm_stderr": 0.04512608598542127
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.22426470588235295,
"acc_stderr": 0.025336848563332372,
"acc_norm": 0.22426470588235295,
"acc_norm_stderr": 0.025336848563332372
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.35918367346938773,
"acc_stderr": 0.03071356045510849,
"acc_norm": 0.35918367346938773,
"acc_norm_stderr": 0.03071356045510849
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.28270042194092826,
"acc_stderr": 0.029312814153955924,
"acc_norm": 0.28270042194092826,
"acc_norm_stderr": 0.029312814153955924
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.24511082138200782,
"acc_stderr": 0.010986307870045517,
"acc_norm": 0.24511082138200782,
"acc_norm_stderr": 0.010986307870045517
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.25980392156862747,
"acc_stderr": 0.030778554678693254,
"acc_norm": 0.25980392156862747,
"acc_norm_stderr": 0.030778554678693254
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.2545454545454545,
"acc_stderr": 0.0340150671524904,
"acc_norm": 0.2545454545454545,
"acc_norm_stderr": 0.0340150671524904
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.2766217870257038,
"mc1_stderr": 0.015659605755326905,
"mc2": 0.4649376014172755,
"mc2_stderr": 0.015443831068166118
},
"harness|ko_commongen_v2|2": {
"acc": 0.26564344746162927,
"acc_stderr": 0.015185107107791248,
"acc_norm": 0.35182998819362454,
"acc_norm_stderr": 0.016418206451218054
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "eclipsemint/kollama2-7b-v0",
"model_sha": "e2a3ee343f997cca7ad3e25b5d970376d79c5b4e",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}