results/MNCJ1hun/Zephyr-7B-alpha-OP-u1k-ver0.1/result_2023-10-29 00:21:54.json
{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.3532423208191126,
"acc_stderr": 0.013967822714840055,
"acc_norm": 0.41723549488054607,
"acc_norm_stderr": 0.014409825518403082
},
"harness|ko_hellaswag|10": {
"acc": 0.38149770961959767,
"acc_stderr": 0.00484761521647345,
"acc_norm": 0.4923322047400916,
"acc_norm_stderr": 0.0049891946277078525
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.5087719298245614,
"acc_stderr": 0.03834234744164993,
"acc_norm": 0.5087719298245614,
"acc_norm_stderr": 0.03834234744164993
},
"harness|ko_mmlu_management|5": {
"acc": 0.6116504854368932,
"acc_stderr": 0.04825729337356389,
"acc_norm": 0.6116504854368932,
"acc_norm_stderr": 0.04825729337356389
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.46871008939974457,
"acc_stderr": 0.017844918090468547,
"acc_norm": 0.46871008939974457,
"acc_norm_stderr": 0.017844918090468547
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.34074074074074073,
"acc_stderr": 0.04094376269996794,
"acc_norm": 0.34074074074074073,
"acc_norm_stderr": 0.04094376269996794
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.29,
"acc_stderr": 0.04560480215720683,
"acc_norm": 0.29,
"acc_norm_stderr": 0.04560480215720683
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.4553191489361702,
"acc_stderr": 0.03255525359340355,
"acc_norm": 0.4553191489361702,
"acc_norm_stderr": 0.03255525359340355
},
"harness|ko_mmlu_virology|5": {
"acc": 0.3795180722891566,
"acc_stderr": 0.037777988227480165,
"acc_norm": 0.3795180722891566,
"acc_norm_stderr": 0.037777988227480165
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.4855305466237942,
"acc_stderr": 0.02838619808417768,
"acc_norm": 0.4855305466237942,
"acc_norm_stderr": 0.02838619808417768
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.4618834080717489,
"acc_stderr": 0.033460150119732274,
"acc_norm": 0.4618834080717489,
"acc_norm_stderr": 0.033460150119732274
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.4122137404580153,
"acc_stderr": 0.04317171194870255,
"acc_norm": 0.4122137404580153,
"acc_norm_stderr": 0.04317171194870255
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.41,
"acc_stderr": 0.049431107042371025,
"acc_norm": 0.41,
"acc_norm_stderr": 0.049431107042371025
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.5151515151515151,
"acc_stderr": 0.03560716516531061,
"acc_norm": 0.5151515151515151,
"acc_norm_stderr": 0.03560716516531061
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.3586206896551724,
"acc_stderr": 0.039966295748767186,
"acc_norm": 0.3586206896551724,
"acc_norm_stderr": 0.039966295748767186
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.22549019607843138,
"acc_stderr": 0.04158307533083286,
"acc_norm": 0.22549019607843138,
"acc_norm_stderr": 0.04158307533083286
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.46218487394957986,
"acc_stderr": 0.032385469487589795,
"acc_norm": 0.46218487394957986,
"acc_norm_stderr": 0.032385469487589795
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.43333333333333335,
"acc_stderr": 0.02512465352588513,
"acc_norm": 0.43333333333333335,
"acc_norm_stderr": 0.02512465352588513
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.58,
"acc_stderr": 0.04960449637488583,
"acc_norm": 0.58,
"acc_norm_stderr": 0.04960449637488583
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.25,
"acc_stderr": 0.04351941398892446,
"acc_norm": 0.25,
"acc_norm_stderr": 0.04351941398892446
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.49074074074074076,
"acc_stderr": 0.04832853553437056,
"acc_norm": 0.49074074074074076,
"acc_norm_stderr": 0.04832853553437056
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.3694581280788177,
"acc_stderr": 0.03395970381998573,
"acc_norm": 0.3694581280788177,
"acc_norm_stderr": 0.03395970381998573
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.45806451612903226,
"acc_stderr": 0.028343787250540636,
"acc_norm": 0.45806451612903226,
"acc_norm_stderr": 0.028343787250540636
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.688034188034188,
"acc_stderr": 0.03035152732334493,
"acc_norm": 0.688034188034188,
"acc_norm_stderr": 0.03035152732334493
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.4641509433962264,
"acc_stderr": 0.030693675018458003,
"acc_norm": 0.4641509433962264,
"acc_norm_stderr": 0.030693675018458003
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.5272727272727272,
"acc_stderr": 0.04782001791380061,
"acc_norm": 0.5272727272727272,
"acc_norm_stderr": 0.04782001791380061
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.3111111111111111,
"acc_stderr": 0.028226446749683515,
"acc_norm": 0.3111111111111111,
"acc_norm_stderr": 0.028226446749683515
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.25165562913907286,
"acc_stderr": 0.03543304234389985,
"acc_norm": 0.25165562913907286,
"acc_norm_stderr": 0.03543304234389985
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.6169154228855721,
"acc_stderr": 0.034375193373382504,
"acc_norm": 0.6169154228855721,
"acc_norm_stderr": 0.034375193373382504
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.3352601156069364,
"acc_stderr": 0.035995863012470784,
"acc_norm": 0.3352601156069364,
"acc_norm_stderr": 0.035995863012470784
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.335978835978836,
"acc_stderr": 0.02432631052914913,
"acc_norm": 0.335978835978836,
"acc_norm_stderr": 0.02432631052914913
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.3263888888888889,
"acc_stderr": 0.03921067198982266,
"acc_norm": 0.3263888888888889,
"acc_norm_stderr": 0.03921067198982266
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.41,
"acc_stderr": 0.049431107042371025,
"acc_norm": 0.41,
"acc_norm_stderr": 0.049431107042371025
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.61,
"acc_stderr": 0.04902071300001975,
"acc_norm": 0.61,
"acc_norm_stderr": 0.04902071300001975
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.5,
"acc_stderr": 0.026919095102908273,
"acc_norm": 0.5,
"acc_norm_stderr": 0.026919095102908273
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.49079754601226994,
"acc_stderr": 0.039277056007874414,
"acc_norm": 0.49079754601226994,
"acc_norm_stderr": 0.039277056007874414
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.36728395061728397,
"acc_stderr": 0.026822801759507894,
"acc_norm": 0.36728395061728397,
"acc_norm_stderr": 0.026822801759507894
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.34,
"acc_stderr": 0.04760952285695235,
"acc_norm": 0.34,
"acc_norm_stderr": 0.04760952285695235
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.538860103626943,
"acc_stderr": 0.035975244117345775,
"acc_norm": 0.538860103626943,
"acc_norm_stderr": 0.035975244117345775
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.3157894736842105,
"acc_stderr": 0.04372748290278008,
"acc_norm": 0.3157894736842105,
"acc_norm_stderr": 0.04372748290278008
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.5045871559633027,
"acc_stderr": 0.02143642095552942,
"acc_norm": 0.5045871559633027,
"acc_norm_stderr": 0.02143642095552942
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.3333333333333333,
"acc_stderr": 0.04216370213557835,
"acc_norm": 0.3333333333333333,
"acc_norm_stderr": 0.04216370213557835
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.49019607843137253,
"acc_stderr": 0.028624412550167965,
"acc_norm": 0.49019607843137253,
"acc_norm_stderr": 0.028624412550167965
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.52,
"acc_stderr": 0.050211673156867795,
"acc_norm": 0.52,
"acc_norm_stderr": 0.050211673156867795
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.6198347107438017,
"acc_stderr": 0.04431324501968432,
"acc_norm": 0.6198347107438017,
"acc_norm_stderr": 0.04431324501968432
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.39473684210526316,
"acc_stderr": 0.03977749934622074,
"acc_norm": 0.39473684210526316,
"acc_norm_stderr": 0.03977749934622074
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.3709150326797386,
"acc_stderr": 0.01954210156485412,
"acc_norm": 0.3709150326797386,
"acc_norm_stderr": 0.01954210156485412
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.3333333333333333,
"acc_stderr": 0.028121636040639886,
"acc_norm": 0.3333333333333333,
"acc_norm_stderr": 0.028121636040639886
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.32142857142857145,
"acc_stderr": 0.04432804055291521,
"acc_norm": 0.32142857142857145,
"acc_norm_stderr": 0.04432804055291521
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.39351851851851855,
"acc_stderr": 0.03331747876370312,
"acc_norm": 0.39351851851851855,
"acc_norm_stderr": 0.03331747876370312
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.24134078212290502,
"acc_stderr": 0.014310999547961459,
"acc_norm": 0.24134078212290502,
"acc_norm_stderr": 0.014310999547961459
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.39,
"acc_stderr": 0.04902071300001975,
"acc_norm": 0.39,
"acc_norm_stderr": 0.04902071300001975
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.54,
"acc_stderr": 0.05009082659620332,
"acc_norm": 0.54,
"acc_norm_stderr": 0.05009082659620332
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.4117647058823529,
"acc_stderr": 0.029896163033125468,
"acc_norm": 0.4117647058823529,
"acc_norm_stderr": 0.029896163033125468
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.44081632653061226,
"acc_stderr": 0.03178419114175363,
"acc_norm": 0.44081632653061226,
"acc_norm_stderr": 0.03178419114175363
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.5189873417721519,
"acc_stderr": 0.03252375148090447,
"acc_norm": 0.5189873417721519,
"acc_norm_stderr": 0.03252375148090447
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.29921773142112124,
"acc_stderr": 0.011695374630696052,
"acc_norm": 0.29921773142112124,
"acc_norm_stderr": 0.011695374630696052
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.5,
"acc_stderr": 0.03509312031717982,
"acc_norm": 0.5,
"acc_norm_stderr": 0.03509312031717982
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.46060606060606063,
"acc_stderr": 0.03892207016552013,
"acc_norm": 0.46060606060606063,
"acc_norm_stderr": 0.03892207016552013
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.2741738066095471,
"mc1_stderr": 0.015616518497219371,
"mc2": 0.45601808163931185,
"mc2_stderr": 0.015622209231910858
},
"harness|ko_commongen_v2|2": {
"acc": 0.40731995277449823,
"acc_stderr": 0.01689245669519127,
"acc_norm": 0.4604486422668241,
"acc_norm_stderr": 0.017136487626049846
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "MNCJ1hun/Zephyr-7B-alpha-OP-u1k-ver0.1",
"model_sha": "7692de676eb6a3561d10a21a64bcf45cc629665b",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}
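
For reference, a minimal Python sketch for working with this file (assuming it has been saved locally as `result.json`; the filename is illustrative). It loads the harness output above and averages `acc_norm` across the ko_mmlu subtasks, which is roughly how per-category leaderboard scores are aggregated:

```python
import json

# Hypothetical local path; point this at wherever the result file is stored.
with open("result.json", encoding="utf-8") as f:
    data = json.load(f)

# Collect acc_norm over the ko_mmlu subtasks only; other tasks
# (ko_arc_challenge, ko_hellaswag, ko_truthfulqa_mc, ko_commongen_v2)
# are reported separately in the leaderboard.
mmlu_scores = [
    metrics["acc_norm"]
    for task, metrics in data["results"].items()
    if task.startswith("harness|ko_mmlu_")
]

print(f"ko_mmlu subtasks: {len(mmlu_scores)}")
print(f"mean acc_norm:    {sum(mmlu_scores) / len(mmlu_scores):.4f}")
```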