{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.5819112627986348,
"acc_stderr": 0.014413988396996083,
"acc_norm": 0.6757679180887372,
"acc_norm_stderr": 0.01367881039951882
},
"harness|ko_hellaswag|10": {
"acc": 0.3521210914160526,
"acc_stderr": 0.0047665533369174885,
"acc_norm": 0.49133638717386974,
"acc_norm_stderr": 0.004989032307320729
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.5906432748538012,
"acc_stderr": 0.037712831076265434,
"acc_norm": 0.5906432748538012,
"acc_norm_stderr": 0.037712831076265434
},
"harness|ko_mmlu_management|5": {
"acc": 0.5922330097087378,
"acc_stderr": 0.048657775704107696,
"acc_norm": 0.5922330097087378,
"acc_norm_stderr": 0.048657775704107696
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.6500638569604087,
"acc_stderr": 0.01705567979715043,
"acc_norm": 0.6500638569604087,
"acc_norm_stderr": 0.01705567979715043
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.45925925925925926,
"acc_stderr": 0.04304979692464243,
"acc_norm": 0.45925925925925926,
"acc_norm_stderr": 0.04304979692464243
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.4,
"acc_stderr": 0.04923659639173309,
"acc_norm": 0.4,
"acc_norm_stderr": 0.04923659639173309
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.48936170212765956,
"acc_stderr": 0.03267862331014063,
"acc_norm": 0.48936170212765956,
"acc_norm_stderr": 0.03267862331014063
},
"harness|ko_mmlu_virology|5": {
"acc": 0.5542168674698795,
"acc_stderr": 0.03869543323472101,
"acc_norm": 0.5542168674698795,
"acc_norm_stderr": 0.03869543323472101
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.572347266881029,
"acc_stderr": 0.028099240775809574,
"acc_norm": 0.572347266881029,
"acc_norm_stderr": 0.028099240775809574
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.5964125560538116,
"acc_stderr": 0.03292802819330313,
"acc_norm": 0.5964125560538116,
"acc_norm_stderr": 0.03292802819330313
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.5267175572519084,
"acc_stderr": 0.04379024936553894,
"acc_norm": 0.5267175572519084,
"acc_norm_stderr": 0.04379024936553894
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.52,
"acc_stderr": 0.050211673156867795,
"acc_norm": 0.52,
"acc_norm_stderr": 0.050211673156867795
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.6161616161616161,
"acc_stderr": 0.03464881675016338,
"acc_norm": 0.6161616161616161,
"acc_norm_stderr": 0.03464881675016338
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.5379310344827586,
"acc_stderr": 0.04154659671707548,
"acc_norm": 0.5379310344827586,
"acc_norm_stderr": 0.04154659671707548
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.3333333333333333,
"acc_stderr": 0.04690650298201943,
"acc_norm": 0.3333333333333333,
"acc_norm_stderr": 0.04690650298201943
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.5504201680672269,
"acc_stderr": 0.03231293497137707,
"acc_norm": 0.5504201680672269,
"acc_norm_stderr": 0.03231293497137707
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.5282051282051282,
"acc_stderr": 0.02531063925493386,
"acc_norm": 0.5282051282051282,
"acc_norm_stderr": 0.02531063925493386
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.61,
"acc_stderr": 0.04902071300001975,
"acc_norm": 0.61,
"acc_norm_stderr": 0.04902071300001975
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.36,
"acc_stderr": 0.04824181513244218,
"acc_norm": 0.36,
"acc_norm_stderr": 0.04824181513244218
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.5555555555555556,
"acc_stderr": 0.04803752235190193,
"acc_norm": 0.5555555555555556,
"acc_norm_stderr": 0.04803752235190193
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.458128078817734,
"acc_stderr": 0.03505630140785741,
"acc_norm": 0.458128078817734,
"acc_norm_stderr": 0.03505630140785741
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.5516129032258065,
"acc_stderr": 0.02829205683011273,
"acc_norm": 0.5516129032258065,
"acc_norm_stderr": 0.02829205683011273
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.7478632478632479,
"acc_stderr": 0.02844796547623102,
"acc_norm": 0.7478632478632479,
"acc_norm_stderr": 0.02844796547623102
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.5320754716981132,
"acc_stderr": 0.03070948699255654,
"acc_norm": 0.5320754716981132,
"acc_norm_stderr": 0.03070948699255654
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.6090909090909091,
"acc_stderr": 0.04673752333670239,
"acc_norm": 0.6090909090909091,
"acc_norm_stderr": 0.04673752333670239
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.3592592592592593,
"acc_stderr": 0.02925290592725198,
"acc_norm": 0.3592592592592593,
"acc_norm_stderr": 0.02925290592725198
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.37748344370860926,
"acc_stderr": 0.0395802723112157,
"acc_norm": 0.37748344370860926,
"acc_norm_stderr": 0.0395802723112157
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.681592039800995,
"acc_stderr": 0.03294118479054095,
"acc_norm": 0.681592039800995,
"acc_norm_stderr": 0.03294118479054095
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.5375722543352601,
"acc_stderr": 0.0380168510452446,
"acc_norm": 0.5375722543352601,
"acc_norm_stderr": 0.0380168510452446
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.3888888888888889,
"acc_stderr": 0.02510742548113728,
"acc_norm": 0.3888888888888889,
"acc_norm_stderr": 0.02510742548113728
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.4652777777777778,
"acc_stderr": 0.04171115858181618,
"acc_norm": 0.4652777777777778,
"acc_norm_stderr": 0.04171115858181618
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.49,
"acc_stderr": 0.05024183937956912,
"acc_norm": 0.49,
"acc_norm_stderr": 0.05024183937956912
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.74,
"acc_stderr": 0.04408440022768078,
"acc_norm": 0.74,
"acc_norm_stderr": 0.04408440022768078
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.5346820809248555,
"acc_stderr": 0.026854257928258893,
"acc_norm": 0.5346820809248555,
"acc_norm_stderr": 0.026854257928258893
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.5766871165644172,
"acc_stderr": 0.03881891213334384,
"acc_norm": 0.5766871165644172,
"acc_norm_stderr": 0.03881891213334384
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.5987654320987654,
"acc_stderr": 0.027272582849839806,
"acc_norm": 0.5987654320987654,
"acc_norm_stderr": 0.027272582849839806
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.37,
"acc_stderr": 0.04852365870939099,
"acc_norm": 0.37,
"acc_norm_stderr": 0.04852365870939099
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.6580310880829016,
"acc_stderr": 0.03423465100104283,
"acc_norm": 0.6580310880829016,
"acc_norm_stderr": 0.03423465100104283
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.37719298245614036,
"acc_stderr": 0.04559522141958215,
"acc_norm": 0.37719298245614036,
"acc_norm_stderr": 0.04559522141958215
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.6954128440366972,
"acc_stderr": 0.01973229942035404,
"acc_norm": 0.6954128440366972,
"acc_norm_stderr": 0.01973229942035404
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.373015873015873,
"acc_stderr": 0.04325506042017086,
"acc_norm": 0.373015873015873,
"acc_norm_stderr": 0.04325506042017086
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.5522875816993464,
"acc_stderr": 0.028472938478033522,
"acc_norm": 0.5522875816993464,
"acc_norm_stderr": 0.028472938478033522
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.57,
"acc_stderr": 0.04975698519562428,
"acc_norm": 0.57,
"acc_norm_stderr": 0.04975698519562428
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.6363636363636364,
"acc_stderr": 0.043913262867240704,
"acc_norm": 0.6363636363636364,
"acc_norm_stderr": 0.043913262867240704
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.5131578947368421,
"acc_stderr": 0.04067533136309172,
"acc_norm": 0.5131578947368421,
"acc_norm_stderr": 0.04067533136309172
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.5081699346405228,
"acc_stderr": 0.02022513434305727,
"acc_norm": 0.5081699346405228,
"acc_norm_stderr": 0.02022513434305727
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.4148936170212766,
"acc_stderr": 0.0293922365846125,
"acc_norm": 0.4148936170212766,
"acc_norm_stderr": 0.0293922365846125
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.35714285714285715,
"acc_stderr": 0.04547960999764376,
"acc_norm": 0.35714285714285715,
"acc_norm_stderr": 0.04547960999764376
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.44907407407407407,
"acc_stderr": 0.03392238405321617,
"acc_norm": 0.44907407407407407,
"acc_norm_stderr": 0.03392238405321617
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.28938547486033517,
"acc_stderr": 0.015166544550490303,
"acc_norm": 0.28938547486033517,
"acc_norm_stderr": 0.015166544550490303
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.45,
"acc_stderr": 0.05,
"acc_norm": 0.45,
"acc_norm_stderr": 0.05
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.59,
"acc_stderr": 0.049431107042371025,
"acc_norm": 0.59,
"acc_norm_stderr": 0.049431107042371025
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.5294117647058824,
"acc_stderr": 0.030320243265004123,
"acc_norm": 0.5294117647058824,
"acc_norm_stderr": 0.030320243265004123
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.5714285714285714,
"acc_stderr": 0.03168091161233882,
"acc_norm": 0.5714285714285714,
"acc_norm_stderr": 0.03168091161233882
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.6075949367088608,
"acc_stderr": 0.0317847187456473,
"acc_norm": 0.6075949367088608,
"acc_norm_stderr": 0.0317847187456473
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.4048239895697523,
"acc_stderr": 0.012536743830953977,
"acc_norm": 0.4048239895697523,
"acc_norm_stderr": 0.012536743830953977
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.5196078431372549,
"acc_stderr": 0.03506612560524866,
"acc_norm": 0.5196078431372549,
"acc_norm_stderr": 0.03506612560524866
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.5333333333333333,
"acc_stderr": 0.03895658065271847,
"acc_norm": 0.5333333333333333,
"acc_norm_stderr": 0.03895658065271847
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.46266829865361075,
"mc1_stderr": 0.017454645150970588,
"mc2": 0.6141257090429322,
"mc2_stderr": 0.015748348539901636
},
"harness|ko_commongen_v2|2": {
"acc": 0.24321133412042503,
"acc_stderr": 0.014750068360453278,
"acc_norm": 0.32231404958677684,
"acc_norm_stderr": 0.016068253615813953
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "Deepnoid/DND-v0.0-e1",
"model_sha": "3d2a1d996a2c9b03847d95fa67476aaf2e11a17a",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}