{ "results": { "hendrycksTest-abstract_algebra": { "acc": 0.12, "acc_stderr": 0.066332495807108, "acc_norm": 0.12, "acc_norm_stderr": 0.066332495807108 }, "hendrycksTest-anatomy": { "acc": 0.16, "acc_stderr": 0.0748331477354788, "acc_norm": 0.16, "acc_norm_stderr": 0.0748331477354788 }, "hendrycksTest-astronomy": { "acc": 0.28, "acc_stderr": 0.0916515138991168, "acc_norm": 0.28, "acc_norm_stderr": 0.0916515138991168 }, "hendrycksTest-business_ethics": { "acc": 0.16, "acc_stderr": 0.0748331477354788, "acc_norm": 0.16, "acc_norm_stderr": 0.0748331477354788 }, "hendrycksTest-clinical_knowledge": { "acc": 0.32, "acc_stderr": 0.09521904571390466, "acc_norm": 0.32, "acc_norm_stderr": 0.09521904571390466 }, "hendrycksTest-college_biology": { "acc": 0.24, "acc_stderr": 0.08717797887081345, "acc_norm": 0.24, "acc_norm_stderr": 0.08717797887081345 }, "hendrycksTest-college_chemistry": { "acc": 0.16, "acc_stderr": 0.0748331477354788, "acc_norm": 0.16, "acc_norm_stderr": 0.0748331477354788 }, "hendrycksTest-college_computer_science": { "acc": 0.28, "acc_stderr": 0.0916515138991168, "acc_norm": 0.28, "acc_norm_stderr": 0.0916515138991168 }, "hendrycksTest-college_mathematics": { "acc": 0.28, "acc_stderr": 0.0916515138991168, "acc_norm": 0.28, "acc_norm_stderr": 0.0916515138991168 }, "hendrycksTest-college_medicine": { "acc": 0.16, "acc_stderr": 0.07483314773547879, "acc_norm": 0.16, "acc_norm_stderr": 0.07483314773547879 }, "hendrycksTest-college_physics": { "acc": 0.2, "acc_stderr": 0.08164965809277261, "acc_norm": 0.2, "acc_norm_stderr": 0.08164965809277261 }, "hendrycksTest-computer_security": { "acc": 0.32, "acc_stderr": 0.09521904571390465, "acc_norm": 0.32, "acc_norm_stderr": 0.09521904571390465 }, "hendrycksTest-conceptual_physics": { "acc": 0.2, "acc_stderr": 0.08164965809277261, "acc_norm": 0.2, "acc_norm_stderr": 0.08164965809277261 }, "hendrycksTest-econometrics": { "acc": 0.2, "acc_stderr": 0.08164965809277261, "acc_norm": 0.2, "acc_norm_stderr": 0.08164965809277261 }, "hendrycksTest-electrical_engineering": { "acc": 0.24, "acc_stderr": 0.08717797887081345, "acc_norm": 0.24, "acc_norm_stderr": 0.08717797887081345 }, "hendrycksTest-elementary_mathematics": { "acc": 0.12, "acc_stderr": 0.06633249580710802, "acc_norm": 0.12, "acc_norm_stderr": 0.06633249580710802 }, "hendrycksTest-formal_logic": { "acc": 0.32, "acc_stderr": 0.09521904571390465, "acc_norm": 0.32, "acc_norm_stderr": 0.09521904571390465 }, "hendrycksTest-global_facts": { "acc": 0.24, "acc_stderr": 0.08717797887081347, "acc_norm": 0.24, "acc_norm_stderr": 0.08717797887081347 }, "hendrycksTest-high_school_biology": { "acc": 0.24, "acc_stderr": 0.08717797887081345, "acc_norm": 0.24, "acc_norm_stderr": 0.08717797887081345 }, "hendrycksTest-high_school_chemistry": { "acc": 0.28, "acc_stderr": 0.09165151389911681, "acc_norm": 0.28, "acc_norm_stderr": 0.09165151389911681 }, "hendrycksTest-high_school_computer_science": { "acc": 0.08, "acc_stderr": 0.05537749241945382, "acc_norm": 0.08, "acc_norm_stderr": 0.05537749241945382 }, "hendrycksTest-high_school_european_history": { "acc": 0.12, "acc_stderr": 0.066332495807108, "acc_norm": 0.12, "acc_norm_stderr": 0.066332495807108 }, "hendrycksTest-high_school_geography": { "acc": 0.32, "acc_stderr": 0.09521904571390466, "acc_norm": 0.32, "acc_norm_stderr": 0.09521904571390466 }, "hendrycksTest-high_school_government_and_politics": { "acc": 0.16, "acc_stderr": 0.0748331477354788, "acc_norm": 0.16, "acc_norm_stderr": 0.0748331477354788 }, "hendrycksTest-high_school_macroeconomics": { "acc": 
0.32, "acc_stderr": 0.09521904571390467, "acc_norm": 0.32, "acc_norm_stderr": 0.09521904571390467 }, "hendrycksTest-high_school_mathematics": { "acc": 0.32, "acc_stderr": 0.09521904571390465, "acc_norm": 0.32, "acc_norm_stderr": 0.09521904571390465 }, "hendrycksTest-high_school_microeconomics": { "acc": 0.2, "acc_stderr": 0.08164965809277261, "acc_norm": 0.2, "acc_norm_stderr": 0.08164965809277261 }, "hendrycksTest-high_school_physics": { "acc": 0.2, "acc_stderr": 0.08164965809277262, "acc_norm": 0.2, "acc_norm_stderr": 0.08164965809277262 }, "hendrycksTest-high_school_psychology": { "acc": 0.2, "acc_stderr": 0.08164965809277261, "acc_norm": 0.2, "acc_norm_stderr": 0.08164965809277261 }, "hendrycksTest-high_school_statistics": { "acc": 0.36, "acc_stderr": 0.09797958971132711, "acc_norm": 0.36, "acc_norm_stderr": 0.09797958971132711 }, "hendrycksTest-high_school_us_history": { "acc": 0.4, "acc_stderr": 0.10000000000000002, "acc_norm": 0.4, "acc_norm_stderr": 0.10000000000000002 }, "hendrycksTest-high_school_world_history": { "acc": 0.28, "acc_stderr": 0.0916515138991168, "acc_norm": 0.28, "acc_norm_stderr": 0.0916515138991168 }, "hendrycksTest-human_aging": { "acc": 0.2, "acc_stderr": 0.08164965809277261, "acc_norm": 0.2, "acc_norm_stderr": 0.08164965809277261 }, "hendrycksTest-human_sexuality": { "acc": 0.2, "acc_stderr": 0.08164965809277262, "acc_norm": 0.2, "acc_norm_stderr": 0.08164965809277262 }, "hendrycksTest-international_law": { "acc": 0.28, "acc_stderr": 0.09165151389911678, "acc_norm": 0.28, "acc_norm_stderr": 0.09165151389911678 }, "hendrycksTest-jurisprudence": { "acc": 0.16, "acc_stderr": 0.0748331477354788, "acc_norm": 0.16, "acc_norm_stderr": 0.0748331477354788 }, "hendrycksTest-logical_fallacies": { "acc": 0.2, "acc_stderr": 0.08164965809277261, "acc_norm": 0.2, "acc_norm_stderr": 0.08164965809277261 }, "hendrycksTest-machine_learning": { "acc": 0.28, "acc_stderr": 0.09165151389911678, "acc_norm": 0.28, "acc_norm_stderr": 0.09165151389911678 }, "hendrycksTest-management": { "acc": 0.16, "acc_stderr": 0.0748331477354788, "acc_norm": 0.16, "acc_norm_stderr": 0.0748331477354788 }, "hendrycksTest-marketing": { "acc": 0.16, "acc_stderr": 0.07483314773547879, "acc_norm": 0.16, "acc_norm_stderr": 0.07483314773547879 }, "hendrycksTest-medical_genetics": { "acc": 0.36, "acc_stderr": 0.09797958971132713, "acc_norm": 0.36, "acc_norm_stderr": 0.09797958971132713 }, "hendrycksTest-miscellaneous": { "acc": 0.32, "acc_stderr": 0.09521904571390466, "acc_norm": 0.32, "acc_norm_stderr": 0.09521904571390466 }, "hendrycksTest-moral_disputes": { "acc": 0.28, "acc_stderr": 0.0916515138991168, "acc_norm": 0.28, "acc_norm_stderr": 0.0916515138991168 }, "hendrycksTest-moral_scenarios": { "acc": 0.24, "acc_stderr": 0.08717797887081345, "acc_norm": 0.24, "acc_norm_stderr": 0.08717797887081345 }, "hendrycksTest-nutrition": { "acc": 0.2, "acc_stderr": 0.08164965809277261, "acc_norm": 0.2, "acc_norm_stderr": 0.08164965809277261 }, "hendrycksTest-philosophy": { "acc": 0.2, "acc_stderr": 0.08164965809277261, "acc_norm": 0.2, "acc_norm_stderr": 0.08164965809277261 }, "hendrycksTest-prehistory": { "acc": 0.24, "acc_stderr": 0.08717797887081345, "acc_norm": 0.24, "acc_norm_stderr": 0.08717797887081345 }, "hendrycksTest-professional_accounting": { "acc": 0.32, "acc_stderr": 0.09521904571390465, "acc_norm": 0.32, "acc_norm_stderr": 0.09521904571390465 }, "hendrycksTest-professional_law": { "acc": 0.24, "acc_stderr": 0.08717797887081347, "acc_norm": 0.24, "acc_norm_stderr": 0.08717797887081347 }, 
"hendrycksTest-professional_medicine": { "acc": 0.36, "acc_stderr": 0.09797958971132711, "acc_norm": 0.36, "acc_norm_stderr": 0.09797958971132711 }, "hendrycksTest-professional_psychology": { "acc": 0.2, "acc_stderr": 0.08164965809277261, "acc_norm": 0.2, "acc_norm_stderr": 0.08164965809277261 }, "hendrycksTest-public_relations": { "acc": 0.16, "acc_stderr": 0.0748331477354788, "acc_norm": 0.16, "acc_norm_stderr": 0.0748331477354788 }, "hendrycksTest-security_studies": { "acc": 0.28, "acc_stderr": 0.09165151389911678, "acc_norm": 0.28, "acc_norm_stderr": 0.09165151389911678 }, "hendrycksTest-sociology": { "acc": 0.12, "acc_stderr": 0.06633249580710801, "acc_norm": 0.12, "acc_norm_stderr": 0.06633249580710801 }, "hendrycksTest-us_foreign_policy": { "acc": 0.24, "acc_stderr": 0.08717797887081347, "acc_norm": 0.24, "acc_norm_stderr": 0.08717797887081347 }, "hendrycksTest-virology": { "acc": 0.2, "acc_stderr": 0.08164965809277261, "acc_norm": 0.2, "acc_norm_stderr": 0.08164965809277261 }, "hendrycksTest-world_religions": { "acc": 0.32, "acc_stderr": 0.09521904571390466, "acc_norm": 0.32, "acc_norm_stderr": 0.09521904571390466 } }, "versions": { "hendrycksTest-abstract_algebra": 1, "hendrycksTest-anatomy": 1, "hendrycksTest-astronomy": 1, "hendrycksTest-business_ethics": 1, "hendrycksTest-clinical_knowledge": 1, "hendrycksTest-college_biology": 1, "hendrycksTest-college_chemistry": 1, "hendrycksTest-college_computer_science": 1, "hendrycksTest-college_mathematics": 1, "hendrycksTest-college_medicine": 1, "hendrycksTest-college_physics": 1, "hendrycksTest-computer_security": 1, "hendrycksTest-conceptual_physics": 1, "hendrycksTest-econometrics": 1, "hendrycksTest-electrical_engineering": 1, "hendrycksTest-elementary_mathematics": 1, "hendrycksTest-formal_logic": 1, "hendrycksTest-global_facts": 1, "hendrycksTest-high_school_biology": 1, "hendrycksTest-high_school_chemistry": 1, "hendrycksTest-high_school_computer_science": 1, "hendrycksTest-high_school_european_history": 1, "hendrycksTest-high_school_geography": 1, "hendrycksTest-high_school_government_and_politics": 1, "hendrycksTest-high_school_macroeconomics": 1, "hendrycksTest-high_school_mathematics": 1, "hendrycksTest-high_school_microeconomics": 1, "hendrycksTest-high_school_physics": 1, "hendrycksTest-high_school_psychology": 1, "hendrycksTest-high_school_statistics": 1, "hendrycksTest-high_school_us_history": 1, "hendrycksTest-high_school_world_history": 1, "hendrycksTest-human_aging": 1, "hendrycksTest-human_sexuality": 1, "hendrycksTest-international_law": 1, "hendrycksTest-jurisprudence": 1, "hendrycksTest-logical_fallacies": 1, "hendrycksTest-machine_learning": 1, "hendrycksTest-management": 1, "hendrycksTest-marketing": 1, "hendrycksTest-medical_genetics": 1, "hendrycksTest-miscellaneous": 1, "hendrycksTest-moral_disputes": 1, "hendrycksTest-moral_scenarios": 1, "hendrycksTest-nutrition": 1, "hendrycksTest-philosophy": 1, "hendrycksTest-prehistory": 1, "hendrycksTest-professional_accounting": 1, "hendrycksTest-professional_law": 1, "hendrycksTest-professional_medicine": 1, "hendrycksTest-professional_psychology": 1, "hendrycksTest-public_relations": 1, "hendrycksTest-security_studies": 1, "hendrycksTest-sociology": 1, "hendrycksTest-us_foreign_policy": 1, "hendrycksTest-virology": 1, "hendrycksTest-world_religions": 1 }, "config": { "model": "hf-causal-experimental", "model_args": "pretrained=BEE-spoke-data/smol_llama-101M-GQA,revision=main,trust_remote_code=True,dtype='float'", "num_fewshot": 5, "batch_size": "8", "batch_sizes": 
[], "device": "cuda", "no_cache": false, "limit": 0.25, "bootstrap_iters": 100000, "description_dict": {} } }