{
"results": {
"hendrycksTest-abstract_algebra": {
"acc": 0.32,
"acc_stderr": 0.046882617226215034,
"acc_norm": 0.32,
"acc_norm_stderr": 0.046882617226215034
},
"hendrycksTest-anatomy": {
"acc": 0.32592592592592595,
"acc_stderr": 0.040491220417025055,
"acc_norm": 0.32592592592592595,
"acc_norm_stderr": 0.040491220417025055
},
"hendrycksTest-astronomy": {
"acc": 0.23026315789473684,
"acc_stderr": 0.03426059424403165,
"acc_norm": 0.23026315789473684,
"acc_norm_stderr": 0.03426059424403165
},
"hendrycksTest-business_ethics": {
"acc": 0.3,
"acc_stderr": 0.046056618647183814,
"acc_norm": 0.3,
"acc_norm_stderr": 0.046056618647183814
},
"hendrycksTest-clinical_knowledge": {
"acc": 0.3584905660377358,
"acc_stderr": 0.02951470358398177,
"acc_norm": 0.3584905660377358,
"acc_norm_stderr": 0.02951470358398177
},
"hendrycksTest-college_biology": {
"acc": 0.2916666666666667,
"acc_stderr": 0.038009680605548594,
"acc_norm": 0.2916666666666667,
"acc_norm_stderr": 0.038009680605548594
},
"hendrycksTest-college_chemistry": {
"acc": 0.21,
"acc_stderr": 0.040936018074033256,
"acc_norm": 0.21,
"acc_norm_stderr": 0.040936018074033256
},
"hendrycksTest-college_computer_science": {
"acc": 0.21,
"acc_stderr": 0.040936018074033256,
"acc_norm": 0.21,
"acc_norm_stderr": 0.040936018074033256
},
"hendrycksTest-college_mathematics": {
"acc": 0.23,
"acc_stderr": 0.04229525846816506,
"acc_norm": 0.23,
"acc_norm_stderr": 0.04229525846816506
},
"hendrycksTest-college_medicine": {
"acc": 0.2832369942196532,
"acc_stderr": 0.034355680560478746,
"acc_norm": 0.2832369942196532,
"acc_norm_stderr": 0.034355680560478746
},
"hendrycksTest-college_physics": {
"acc": 0.21568627450980393,
"acc_stderr": 0.04092563958237655,
"acc_norm": 0.21568627450980393,
"acc_norm_stderr": 0.04092563958237655
},
"hendrycksTest-computer_security": {
"acc": 0.37,
"acc_stderr": 0.04852365870939099,
"acc_norm": 0.37,
"acc_norm_stderr": 0.04852365870939099
},
"hendrycksTest-conceptual_physics": {
"acc": 0.3574468085106383,
"acc_stderr": 0.03132941789476425,
"acc_norm": 0.3574468085106383,
"acc_norm_stderr": 0.03132941789476425
},
"hendrycksTest-econometrics": {
"acc": 0.24561403508771928,
"acc_stderr": 0.04049339297748142,
"acc_norm": 0.24561403508771928,
"acc_norm_stderr": 0.04049339297748142
},
"hendrycksTest-electrical_engineering": {
"acc": 0.2827586206896552,
"acc_stderr": 0.03752833958003337,
"acc_norm": 0.2827586206896552,
"acc_norm_stderr": 0.03752833958003337
},
"hendrycksTest-elementary_mathematics": {
"acc": 0.23809523809523808,
"acc_stderr": 0.021935878081184766,
"acc_norm": 0.23809523809523808,
"acc_norm_stderr": 0.021935878081184766
},
"hendrycksTest-formal_logic": {
"acc": 0.1984126984126984,
"acc_stderr": 0.035670166752768635,
"acc_norm": 0.1984126984126984,
"acc_norm_stderr": 0.035670166752768635
},
"hendrycksTest-global_facts": {
"acc": 0.34,
"acc_stderr": 0.04760952285695236,
"acc_norm": 0.34,
"acc_norm_stderr": 0.04760952285695236
},
"hendrycksTest-high_school_biology": {
"acc": 0.3225806451612903,
"acc_stderr": 0.02659308451657229,
"acc_norm": 0.3225806451612903,
"acc_norm_stderr": 0.02659308451657229
},
"hendrycksTest-high_school_chemistry": {
"acc": 0.2315270935960591,
"acc_stderr": 0.02967833314144444,
"acc_norm": 0.2315270935960591,
"acc_norm_stderr": 0.02967833314144444
},
"hendrycksTest-high_school_computer_science": {
"acc": 0.34,
"acc_stderr": 0.047609522856952365,
"acc_norm": 0.34,
"acc_norm_stderr": 0.047609522856952365
},
"hendrycksTest-high_school_european_history": {
"acc": 0.4,
"acc_stderr": 0.03825460278380026,
"acc_norm": 0.4,
"acc_norm_stderr": 0.03825460278380026
},
"hendrycksTest-high_school_geography": {
"acc": 0.3383838383838384,
"acc_stderr": 0.03371124142626303,
"acc_norm": 0.3383838383838384,
"acc_norm_stderr": 0.03371124142626303
},
"hendrycksTest-high_school_government_and_politics": {
"acc": 0.32642487046632124,
"acc_stderr": 0.033840286211432945,
"acc_norm": 0.32642487046632124,
"acc_norm_stderr": 0.033840286211432945
},
"hendrycksTest-high_school_macroeconomics": {
"acc": 0.2358974358974359,
"acc_stderr": 0.021525965407408726,
"acc_norm": 0.2358974358974359,
"acc_norm_stderr": 0.021525965407408726
},
"hendrycksTest-high_school_mathematics": {
"acc": 0.23703703703703705,
"acc_stderr": 0.025928876132766104,
"acc_norm": 0.23703703703703705,
"acc_norm_stderr": 0.025928876132766104
},
"hendrycksTest-high_school_microeconomics": {
"acc": 0.29411764705882354,
"acc_stderr": 0.02959732973097808,
"acc_norm": 0.29411764705882354,
"acc_norm_stderr": 0.02959732973097808
},
"hendrycksTest-high_school_physics": {
"acc": 0.2582781456953642,
"acc_stderr": 0.035737053147634576,
"acc_norm": 0.2582781456953642,
"acc_norm_stderr": 0.035737053147634576
},
"hendrycksTest-high_school_psychology": {
"acc": 0.3779816513761468,
"acc_stderr": 0.020789187066728117,
"acc_norm": 0.3779816513761468,
"acc_norm_stderr": 0.020789187066728117
},
"hendrycksTest-high_school_statistics": {
"acc": 0.1712962962962963,
"acc_stderr": 0.025695341643824674,
"acc_norm": 0.1712962962962963,
"acc_norm_stderr": 0.025695341643824674
},
"hendrycksTest-high_school_us_history": {
"acc": 0.3627450980392157,
"acc_stderr": 0.03374499356319355,
"acc_norm": 0.3627450980392157,
"acc_norm_stderr": 0.03374499356319355
},
"hendrycksTest-high_school_world_history": {
"acc": 0.47257383966244726,
"acc_stderr": 0.03249822718301303,
"acc_norm": 0.47257383966244726,
"acc_norm_stderr": 0.03249822718301303
},
"hendrycksTest-human_aging": {
"acc": 0.4080717488789238,
"acc_stderr": 0.03298574607842821,
"acc_norm": 0.4080717488789238,
"acc_norm_stderr": 0.03298574607842821
},
"hendrycksTest-human_sexuality": {
"acc": 0.3053435114503817,
"acc_stderr": 0.040393149787245626,
"acc_norm": 0.3053435114503817,
"acc_norm_stderr": 0.040393149787245626
},
"hendrycksTest-international_law": {
"acc": 0.5619834710743802,
"acc_stderr": 0.045291468044357915,
"acc_norm": 0.5619834710743802,
"acc_norm_stderr": 0.045291468044357915
},
"hendrycksTest-jurisprudence": {
"acc": 0.37037037037037035,
"acc_stderr": 0.04668408033024932,
"acc_norm": 0.37037037037037035,
"acc_norm_stderr": 0.04668408033024932
},
"hendrycksTest-logical_fallacies": {
"acc": 0.38650306748466257,
"acc_stderr": 0.038258255488486076,
"acc_norm": 0.38650306748466257,
"acc_norm_stderr": 0.038258255488486076
},
"hendrycksTest-machine_learning": {
"acc": 0.3482142857142857,
"acc_stderr": 0.04521829902833585,
"acc_norm": 0.3482142857142857,
"acc_norm_stderr": 0.04521829902833585
},
"hendrycksTest-management": {
"acc": 0.3300970873786408,
"acc_stderr": 0.0465614711001235,
"acc_norm": 0.3300970873786408,
"acc_norm_stderr": 0.0465614711001235
},
"hendrycksTest-marketing": {
"acc": 0.47435897435897434,
"acc_stderr": 0.03271298896811159,
"acc_norm": 0.47435897435897434,
"acc_norm_stderr": 0.03271298896811159
},
"hendrycksTest-medical_genetics": {
"acc": 0.35,
"acc_stderr": 0.047937248544110196,
"acc_norm": 0.35,
"acc_norm_stderr": 0.047937248544110196
},
"hendrycksTest-miscellaneous": {
"acc": 0.47126436781609193,
"acc_stderr": 0.01785041079438017,
"acc_norm": 0.47126436781609193,
"acc_norm_stderr": 0.01785041079438017
},
"hendrycksTest-moral_disputes": {
"acc": 0.315028901734104,
"acc_stderr": 0.025009313790069727,
"acc_norm": 0.315028901734104,
"acc_norm_stderr": 0.025009313790069727
},
"hendrycksTest-moral_scenarios": {
"acc": 0.23798882681564246,
"acc_stderr": 0.014242630070574915,
"acc_norm": 0.23798882681564246,
"acc_norm_stderr": 0.014242630070574915
},
"hendrycksTest-nutrition": {
"acc": 0.369281045751634,
"acc_stderr": 0.027634176689602653,
"acc_norm": 0.369281045751634,
"acc_norm_stderr": 0.027634176689602653
},
"hendrycksTest-philosophy": {
"acc": 0.3633440514469453,
"acc_stderr": 0.027316847674192717,
"acc_norm": 0.3633440514469453,
"acc_norm_stderr": 0.027316847674192717
},
"hendrycksTest-prehistory": {
"acc": 0.36419753086419754,
"acc_stderr": 0.02677492989972233,
"acc_norm": 0.36419753086419754,
"acc_norm_stderr": 0.02677492989972233
},
"hendrycksTest-professional_accounting": {
"acc": 0.30141843971631205,
"acc_stderr": 0.02737412888263115,
"acc_norm": 0.30141843971631205,
"acc_norm_stderr": 0.02737412888263115
},
"hendrycksTest-professional_law": {
"acc": 0.3044328552803129,
"acc_stderr": 0.011752877592597575,
"acc_norm": 0.3044328552803129,
"acc_norm_stderr": 0.011752877592597575
},
"hendrycksTest-professional_medicine": {
"acc": 0.2426470588235294,
"acc_stderr": 0.026040662474201264,
"acc_norm": 0.2426470588235294,
"acc_norm_stderr": 0.026040662474201264
},
"hendrycksTest-professional_psychology": {
"acc": 0.3104575163398693,
"acc_stderr": 0.018718067052623227,
"acc_norm": 0.3104575163398693,
"acc_norm_stderr": 0.018718067052623227
},
"hendrycksTest-public_relations": {
"acc": 0.38181818181818183,
"acc_stderr": 0.04653429807913508,
"acc_norm": 0.38181818181818183,
"acc_norm_stderr": 0.04653429807913508
},
"hendrycksTest-security_studies": {
"acc": 0.3020408163265306,
"acc_stderr": 0.029393609319879818,
"acc_norm": 0.3020408163265306,
"acc_norm_stderr": 0.029393609319879818
},
"hendrycksTest-sociology": {
"acc": 0.4079601990049751,
"acc_stderr": 0.034751163651940926,
"acc_norm": 0.4079601990049751,
"acc_norm_stderr": 0.034751163651940926
},
"hendrycksTest-us_foreign_policy": {
"acc": 0.41,
"acc_stderr": 0.04943110704237102,
"acc_norm": 0.41,
"acc_norm_stderr": 0.04943110704237102
},
"hendrycksTest-virology": {
"acc": 0.4036144578313253,
"acc_stderr": 0.038194861407583984,
"acc_norm": 0.4036144578313253,
"acc_norm_stderr": 0.038194861407583984
},
"hendrycksTest-world_religions": {
"acc": 0.40350877192982454,
"acc_stderr": 0.03762738699917056,
"acc_norm": 0.40350877192982454,
"acc_norm_stderr": 0.03762738699917056
}
},
"versions": {
"hendrycksTest-abstract_algebra": 1,
"hendrycksTest-anatomy": 1,
"hendrycksTest-astronomy": 1,
"hendrycksTest-business_ethics": 1,
"hendrycksTest-clinical_knowledge": 1,
"hendrycksTest-college_biology": 1,
"hendrycksTest-college_chemistry": 1,
"hendrycksTest-college_computer_science": 1,
"hendrycksTest-college_mathematics": 1,
"hendrycksTest-college_medicine": 1,
"hendrycksTest-college_physics": 1,
"hendrycksTest-computer_security": 1,
"hendrycksTest-conceptual_physics": 1,
"hendrycksTest-econometrics": 1,
"hendrycksTest-electrical_engineering": 1,
"hendrycksTest-elementary_mathematics": 1,
"hendrycksTest-formal_logic": 1,
"hendrycksTest-global_facts": 1,
"hendrycksTest-high_school_biology": 1,
"hendrycksTest-high_school_chemistry": 1,
"hendrycksTest-high_school_computer_science": 1,
"hendrycksTest-high_school_european_history": 1,
"hendrycksTest-high_school_geography": 1,
"hendrycksTest-high_school_government_and_politics": 1,
"hendrycksTest-high_school_macroeconomics": 1,
"hendrycksTest-high_school_mathematics": 1,
"hendrycksTest-high_school_microeconomics": 1,
"hendrycksTest-high_school_physics": 1,
"hendrycksTest-high_school_psychology": 1,
"hendrycksTest-high_school_statistics": 1,
"hendrycksTest-high_school_us_history": 1,
"hendrycksTest-high_school_world_history": 1,
"hendrycksTest-human_aging": 1,
"hendrycksTest-human_sexuality": 1,
"hendrycksTest-international_law": 1,
"hendrycksTest-jurisprudence": 1,
"hendrycksTest-logical_fallacies": 1,
"hendrycksTest-machine_learning": 1,
"hendrycksTest-management": 1,
"hendrycksTest-marketing": 1,
"hendrycksTest-medical_genetics": 1,
"hendrycksTest-miscellaneous": 1,
"hendrycksTest-moral_disputes": 1,
"hendrycksTest-moral_scenarios": 1,
"hendrycksTest-nutrition": 1,
"hendrycksTest-philosophy": 1,
"hendrycksTest-prehistory": 1,
"hendrycksTest-professional_accounting": 1,
"hendrycksTest-professional_law": 1,
"hendrycksTest-professional_medicine": 1,
"hendrycksTest-professional_psychology": 1,
"hendrycksTest-public_relations": 1,
"hendrycksTest-security_studies": 1,
"hendrycksTest-sociology": 1,
"hendrycksTest-us_foreign_policy": 1,
"hendrycksTest-virology": 1,
"hendrycksTest-world_religions": 1
},
"config": {
"model": "sparseml",
"model_args": "pretrained=/network/alexandre/research/cerebras/llama2_7B_sparse70_retrained/ultrachat200k/llama2_7B_sparse70_LR3e-4_GC2_E2/training,dtype=bfloat16",
"num_fewshot": 5,
"batch_size": "4",
"batch_sizes": [],
"device": "cuda:7",
"no_cache": true,
"limit": null,
"bootstrap_iters": 100000,
"description_dict": {}
}
}