{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.26791808873720135,
"acc_stderr": 0.012942030195136423,
"acc_norm": 0.3387372013651877,
"acc_norm_stderr": 0.01383056892797433
},
"harness|ko_hellaswag|10": {
"acc": 0.33947420832503483,
"acc_stderr": 0.0047256309115203165,
"acc_norm": 0.42471619199362676,
"acc_norm_stderr": 0.004932896472460571
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.32748538011695905,
"acc_stderr": 0.035993357714560276,
"acc_norm": 0.32748538011695905,
"acc_norm_stderr": 0.035993357714560276
},
"harness|ko_mmlu_management|5": {
"acc": 0.17475728155339806,
"acc_stderr": 0.037601780060266196,
"acc_norm": 0.17475728155339806,
"acc_norm_stderr": 0.037601780060266196
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.3243933588761175,
"acc_stderr": 0.016740929047162716,
"acc_norm": 0.3243933588761175,
"acc_norm_stderr": 0.016740929047162716
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.26666666666666666,
"acc_stderr": 0.038201699145179055,
"acc_norm": 0.26666666666666666,
"acc_norm_stderr": 0.038201699145179055
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.22,
"acc_stderr": 0.0416333199893227,
"acc_norm": 0.22,
"acc_norm_stderr": 0.0416333199893227
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.2553191489361702,
"acc_stderr": 0.028504856470514185,
"acc_norm": 0.2553191489361702,
"acc_norm_stderr": 0.028504856470514185
},
"harness|ko_mmlu_virology|5": {
"acc": 0.3192771084337349,
"acc_stderr": 0.0362933532994786,
"acc_norm": 0.3192771084337349,
"acc_norm_stderr": 0.0362933532994786
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.3408360128617363,
"acc_stderr": 0.02692084126077616,
"acc_norm": 0.3408360128617363,
"acc_norm_stderr": 0.02692084126077616
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.43946188340807174,
"acc_stderr": 0.03331092511038179,
"acc_norm": 0.43946188340807174,
"acc_norm_stderr": 0.03331092511038179
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.2748091603053435,
"acc_stderr": 0.039153454088478354,
"acc_norm": 0.2748091603053435,
"acc_norm_stderr": 0.039153454088478354
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.33,
"acc_stderr": 0.04725815626252605,
"acc_norm": 0.33,
"acc_norm_stderr": 0.04725815626252605
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.2777777777777778,
"acc_stderr": 0.03191178226713547,
"acc_norm": 0.2777777777777778,
"acc_norm_stderr": 0.03191178226713547
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.3103448275862069,
"acc_stderr": 0.03855289616378949,
"acc_norm": 0.3103448275862069,
"acc_norm_stderr": 0.03855289616378949
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.14705882352941177,
"acc_stderr": 0.03524068951567446,
"acc_norm": 0.14705882352941177,
"acc_norm_stderr": 0.03524068951567446
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.2647058823529412,
"acc_stderr": 0.028657491285071973,
"acc_norm": 0.2647058823529412,
"acc_norm_stderr": 0.028657491285071973
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.3435897435897436,
"acc_stderr": 0.024078696580635467,
"acc_norm": 0.3435897435897436,
"acc_norm_stderr": 0.024078696580635467
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.31,
"acc_stderr": 0.04648231987117316,
"acc_norm": 0.31,
"acc_norm_stderr": 0.04648231987117316
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.26,
"acc_stderr": 0.04408440022768079,
"acc_norm": 0.26,
"acc_norm_stderr": 0.04408440022768079
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.25,
"acc_stderr": 0.04186091791394607,
"acc_norm": 0.25,
"acc_norm_stderr": 0.04186091791394607
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.19704433497536947,
"acc_stderr": 0.02798672466673622,
"acc_norm": 0.19704433497536947,
"acc_norm_stderr": 0.02798672466673622
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.29354838709677417,
"acc_stderr": 0.02590608702131929,
"acc_norm": 0.29354838709677417,
"acc_norm_stderr": 0.02590608702131929
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.2777777777777778,
"acc_stderr": 0.02934311479809447,
"acc_norm": 0.2777777777777778,
"acc_norm_stderr": 0.02934311479809447
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.2528301886792453,
"acc_stderr": 0.026749899771241238,
"acc_norm": 0.2528301886792453,
"acc_norm_stderr": 0.026749899771241238
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.2909090909090909,
"acc_stderr": 0.04350271442923243,
"acc_norm": 0.2909090909090909,
"acc_norm_stderr": 0.04350271442923243
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.25555555555555554,
"acc_stderr": 0.02659393910184407,
"acc_norm": 0.25555555555555554,
"acc_norm_stderr": 0.02659393910184407
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.2847682119205298,
"acc_stderr": 0.03684881521389024,
"acc_norm": 0.2847682119205298,
"acc_norm_stderr": 0.03684881521389024
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.35323383084577115,
"acc_stderr": 0.03379790611796776,
"acc_norm": 0.35323383084577115,
"acc_norm_stderr": 0.03379790611796776
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.3236994219653179,
"acc_stderr": 0.0356760379963917,
"acc_norm": 0.3236994219653179,
"acc_norm_stderr": 0.0356760379963917
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.24867724867724866,
"acc_stderr": 0.022261817692400175,
"acc_norm": 0.24867724867724866,
"acc_norm_stderr": 0.022261817692400175
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.24305555555555555,
"acc_stderr": 0.035868792800803406,
"acc_norm": 0.24305555555555555,
"acc_norm_stderr": 0.035868792800803406
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.21,
"acc_stderr": 0.040936018074033256,
"acc_norm": 0.21,
"acc_norm_stderr": 0.040936018074033256
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.44,
"acc_stderr": 0.04988876515698589,
"acc_norm": 0.44,
"acc_norm_stderr": 0.04988876515698589
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.26011560693641617,
"acc_stderr": 0.023618678310069363,
"acc_norm": 0.26011560693641617,
"acc_norm_stderr": 0.023618678310069363
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.24539877300613497,
"acc_stderr": 0.03380939813943354,
"acc_norm": 0.24539877300613497,
"acc_norm_stderr": 0.03380939813943354
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.25308641975308643,
"acc_stderr": 0.024191808600713,
"acc_norm": 0.25308641975308643,
"acc_norm_stderr": 0.024191808600713
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.24,
"acc_stderr": 0.042923469599092816,
"acc_norm": 0.24,
"acc_norm_stderr": 0.042923469599092816
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.35233160621761656,
"acc_stderr": 0.03447478286414358,
"acc_norm": 0.35233160621761656,
"acc_norm_stderr": 0.03447478286414358
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.2543859649122807,
"acc_stderr": 0.040969851398436716,
"acc_norm": 0.2543859649122807,
"acc_norm_stderr": 0.040969851398436716
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.28256880733944956,
"acc_stderr": 0.019304243497707152,
"acc_norm": 0.28256880733944956,
"acc_norm_stderr": 0.019304243497707152
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.2619047619047619,
"acc_stderr": 0.039325376803928704,
"acc_norm": 0.2619047619047619,
"acc_norm_stderr": 0.039325376803928704
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.27124183006535946,
"acc_stderr": 0.025457756696667867,
"acc_norm": 0.27124183006535946,
"acc_norm_stderr": 0.025457756696667867
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.27,
"acc_stderr": 0.04461960433384741,
"acc_norm": 0.27,
"acc_norm_stderr": 0.04461960433384741
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.24793388429752067,
"acc_stderr": 0.039418975265163025,
"acc_norm": 0.24793388429752067,
"acc_norm_stderr": 0.039418975265163025
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.19078947368421054,
"acc_stderr": 0.031975658210325,
"acc_norm": 0.19078947368421054,
"acc_norm_stderr": 0.031975658210325
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.25163398692810457,
"acc_stderr": 0.017555818091322277,
"acc_norm": 0.25163398692810457,
"acc_norm_stderr": 0.017555818091322277
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.2730496453900709,
"acc_stderr": 0.026577860943307857,
"acc_norm": 0.2730496453900709,
"acc_norm_stderr": 0.026577860943307857
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.2857142857142857,
"acc_stderr": 0.04287858751340456,
"acc_norm": 0.2857142857142857,
"acc_norm_stderr": 0.04287858751340456
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.39351851851851855,
"acc_stderr": 0.03331747876370312,
"acc_norm": 0.39351851851851855,
"acc_norm_stderr": 0.03331747876370312
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.22346368715083798,
"acc_stderr": 0.01393206863857977,
"acc_norm": 0.22346368715083798,
"acc_norm_stderr": 0.01393206863857977
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.3,
"acc_stderr": 0.046056618647183814,
"acc_norm": 0.3,
"acc_norm_stderr": 0.046056618647183814
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.3,
"acc_stderr": 0.046056618647183814,
"acc_norm": 0.3,
"acc_norm_stderr": 0.046056618647183814
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.3860294117647059,
"acc_stderr": 0.029573269134411124,
"acc_norm": 0.3860294117647059,
"acc_norm_stderr": 0.029573269134411124
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.39591836734693875,
"acc_stderr": 0.03130802899065685,
"acc_norm": 0.39591836734693875,
"acc_norm_stderr": 0.03130802899065685
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.2911392405063291,
"acc_stderr": 0.02957160106575337,
"acc_norm": 0.2911392405063291,
"acc_norm_stderr": 0.02957160106575337
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.23728813559322035,
"acc_stderr": 0.010865436690780262,
"acc_norm": 0.23728813559322035,
"acc_norm_stderr": 0.010865436690780262
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.28431372549019607,
"acc_stderr": 0.03166009679399812,
"acc_norm": 0.28431372549019607,
"acc_norm_stderr": 0.03166009679399812
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.21818181818181817,
"acc_stderr": 0.03225078108306289,
"acc_norm": 0.21818181818181817,
"acc_norm_stderr": 0.03225078108306289
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.2802937576499388,
"mc1_stderr": 0.01572313952460874,
"mc2": 0.4609143085702979,
"mc2_stderr": 0.015478893728638463
},
"harness|ko_commongen_v2|2": {
"acc": 0.2939787485242031,
"acc_stderr": 0.015663242569091122,
"acc_norm": 0.371900826446281,
"acc_norm_stderr": 0.016616612843224944
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "Edentns/DataVortexS-10.7B-v0.3",
"model_sha": "00c0018dd81707ea37ce355e68f5e6d241a63261",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}