{
"results": {
"hendrycksTest-abstract_algebra": {
"acc": 0.25,
"acc_stderr": 0.04351941398892446,
"acc_norm": 0.25,
"acc_norm_stderr": 0.04351941398892446
},
"hendrycksTest-anatomy": {
"acc": 0.4666666666666667,
"acc_stderr": 0.043097329010363554,
"acc_norm": 0.4666666666666667,
"acc_norm_stderr": 0.043097329010363554
},
"hendrycksTest-astronomy": {
"acc": 0.5263157894736842,
"acc_stderr": 0.04063302731486671,
"acc_norm": 0.5263157894736842,
"acc_norm_stderr": 0.04063302731486671
},
"hendrycksTest-business_ethics": {
"acc": 0.52,
"acc_stderr": 0.050211673156867795,
"acc_norm": 0.52,
"acc_norm_stderr": 0.050211673156867795
},
"hendrycksTest-clinical_knowledge": {
"acc": 0.4867924528301887,
"acc_stderr": 0.030762134874500482,
"acc_norm": 0.4867924528301887,
"acc_norm_stderr": 0.030762134874500482
},
"hendrycksTest-college_biology": {
"acc": 0.4513888888888889,
"acc_stderr": 0.04161402398403279,
"acc_norm": 0.4513888888888889,
"acc_norm_stderr": 0.04161402398403279
},
"hendrycksTest-college_chemistry": {
"acc": 0.33,
"acc_stderr": 0.047258156262526045,
"acc_norm": 0.33,
"acc_norm_stderr": 0.047258156262526045
},
"hendrycksTest-college_computer_science": {
"acc": 0.37,
"acc_stderr": 0.04852365870939099,
"acc_norm": 0.37,
"acc_norm_stderr": 0.04852365870939099
},
"hendrycksTest-college_mathematics": {
"acc": 0.27,
"acc_stderr": 0.044619604333847394,
"acc_norm": 0.27,
"acc_norm_stderr": 0.044619604333847394
},
"hendrycksTest-college_medicine": {
"acc": 0.3988439306358382,
"acc_stderr": 0.03733626655383509,
"acc_norm": 0.3988439306358382,
"acc_norm_stderr": 0.03733626655383509
},
"hendrycksTest-college_physics": {
"acc": 0.23529411764705882,
"acc_stderr": 0.042207736591714534,
"acc_norm": 0.23529411764705882,
"acc_norm_stderr": 0.042207736591714534
},
"hendrycksTest-computer_security": {
"acc": 0.62,
"acc_stderr": 0.048783173121456316,
"acc_norm": 0.62,
"acc_norm_stderr": 0.048783173121456316
},
"hendrycksTest-conceptual_physics": {
"acc": 0.3659574468085106,
"acc_stderr": 0.03148955829745529,
"acc_norm": 0.3659574468085106,
"acc_norm_stderr": 0.03148955829745529
},
"hendrycksTest-econometrics": {
"acc": 0.2807017543859649,
"acc_stderr": 0.042270544512322,
"acc_norm": 0.2807017543859649,
"acc_norm_stderr": 0.042270544512322
},
"hendrycksTest-electrical_engineering": {
"acc": 0.41379310344827586,
"acc_stderr": 0.04104269211806232,
"acc_norm": 0.41379310344827586,
"acc_norm_stderr": 0.04104269211806232
},
"hendrycksTest-elementary_mathematics": {
"acc": 0.2962962962962963,
"acc_stderr": 0.023517294335963283,
"acc_norm": 0.2962962962962963,
"acc_norm_stderr": 0.023517294335963283
},
"hendrycksTest-formal_logic": {
"acc": 0.2619047619047619,
"acc_stderr": 0.0393253768039287,
"acc_norm": 0.2619047619047619,
"acc_norm_stderr": 0.0393253768039287
},
"hendrycksTest-global_facts": {
"acc": 0.34,
"acc_stderr": 0.047609522856952365,
"acc_norm": 0.34,
"acc_norm_stderr": 0.047609522856952365
},
"hendrycksTest-high_school_biology": {
"acc": 0.5193548387096775,
"acc_stderr": 0.028422687404312107,
"acc_norm": 0.5193548387096775,
"acc_norm_stderr": 0.028422687404312107
},
"hendrycksTest-high_school_chemistry": {
"acc": 0.3448275862068966,
"acc_stderr": 0.03344283744280458,
"acc_norm": 0.3448275862068966,
"acc_norm_stderr": 0.03344283744280458
},
"hendrycksTest-high_school_computer_science": {
"acc": 0.44,
"acc_stderr": 0.04988876515698589,
"acc_norm": 0.44,
"acc_norm_stderr": 0.04988876515698589
},
"hendrycksTest-high_school_european_history": {
"acc": 0.5696969696969697,
"acc_stderr": 0.03866225962879077,
"acc_norm": 0.5696969696969697,
"acc_norm_stderr": 0.03866225962879077
},
"hendrycksTest-high_school_geography": {
"acc": 0.5050505050505051,
"acc_stderr": 0.035621707606254015,
"acc_norm": 0.5050505050505051,
"acc_norm_stderr": 0.035621707606254015
},
"hendrycksTest-high_school_government_and_politics": {
"acc": 0.6528497409326425,
"acc_stderr": 0.03435696168361356,
"acc_norm": 0.6528497409326425,
"acc_norm_stderr": 0.03435696168361356
},
"hendrycksTest-high_school_macroeconomics": {
"acc": 0.40512820512820513,
"acc_stderr": 0.024890471769938145,
"acc_norm": 0.40512820512820513,
"acc_norm_stderr": 0.024890471769938145
},
"hendrycksTest-high_school_mathematics": {
"acc": 0.2518518518518518,
"acc_stderr": 0.026466117538959916,
"acc_norm": 0.2518518518518518,
"acc_norm_stderr": 0.026466117538959916
},
"hendrycksTest-high_school_microeconomics": {
"acc": 0.42857142857142855,
"acc_stderr": 0.032145368597886394,
"acc_norm": 0.42857142857142855,
"acc_norm_stderr": 0.032145368597886394
},
"hendrycksTest-high_school_physics": {
"acc": 0.271523178807947,
"acc_stderr": 0.03631329803969654,
"acc_norm": 0.271523178807947,
"acc_norm_stderr": 0.03631329803969654
},
"hendrycksTest-high_school_psychology": {
"acc": 0.6,
"acc_stderr": 0.021004201260420078,
"acc_norm": 0.6,
"acc_norm_stderr": 0.021004201260420078
},
"hendrycksTest-high_school_statistics": {
"acc": 0.37962962962962965,
"acc_stderr": 0.03309682581119035,
"acc_norm": 0.37962962962962965,
"acc_norm_stderr": 0.03309682581119035
},
"hendrycksTest-high_school_us_history": {
"acc": 0.5882352941176471,
"acc_stderr": 0.03454236585380608,
"acc_norm": 0.5882352941176471,
"acc_norm_stderr": 0.03454236585380608
},
"hendrycksTest-high_school_world_history": {
"acc": 0.6666666666666666,
"acc_stderr": 0.0306858205966108,
"acc_norm": 0.6666666666666666,
"acc_norm_stderr": 0.0306858205966108
},
"hendrycksTest-human_aging": {
"acc": 0.57847533632287,
"acc_stderr": 0.033141902221106564,
"acc_norm": 0.57847533632287,
"acc_norm_stderr": 0.033141902221106564
},
"hendrycksTest-human_sexuality": {
"acc": 0.549618320610687,
"acc_stderr": 0.04363643698524779,
"acc_norm": 0.549618320610687,
"acc_norm_stderr": 0.04363643698524779
},
"hendrycksTest-international_law": {
"acc": 0.6859504132231405,
"acc_stderr": 0.04236964753041018,
"acc_norm": 0.6859504132231405,
"acc_norm_stderr": 0.04236964753041018
},
"hendrycksTest-jurisprudence": {
"acc": 0.5185185185185185,
"acc_stderr": 0.04830366024635331,
"acc_norm": 0.5185185185185185,
"acc_norm_stderr": 0.04830366024635331
},
"hendrycksTest-logical_fallacies": {
"acc": 0.49079754601226994,
"acc_stderr": 0.03927705600787443,
"acc_norm": 0.49079754601226994,
"acc_norm_stderr": 0.03927705600787443
},
"hendrycksTest-machine_learning": {
"acc": 0.33035714285714285,
"acc_stderr": 0.04464285714285714,
"acc_norm": 0.33035714285714285,
"acc_norm_stderr": 0.04464285714285714
},
"hendrycksTest-management": {
"acc": 0.6116504854368932,
"acc_stderr": 0.048257293373563895,
"acc_norm": 0.6116504854368932,
"acc_norm_stderr": 0.048257293373563895
},
"hendrycksTest-marketing": {
"acc": 0.6538461538461539,
"acc_stderr": 0.0311669573672359,
"acc_norm": 0.6538461538461539,
"acc_norm_stderr": 0.0311669573672359
},
"hendrycksTest-medical_genetics": {
"acc": 0.45,
"acc_stderr": 0.05,
"acc_norm": 0.45,
"acc_norm_stderr": 0.05
},
"hendrycksTest-miscellaneous": {
"acc": 0.6475095785440613,
"acc_stderr": 0.01708415024408138,
"acc_norm": 0.6475095785440613,
"acc_norm_stderr": 0.01708415024408138
},
"hendrycksTest-moral_disputes": {
"acc": 0.5346820809248555,
"acc_stderr": 0.026854257928258875,
"acc_norm": 0.5346820809248555,
"acc_norm_stderr": 0.026854257928258875
},
"hendrycksTest-moral_scenarios": {
"acc": 0.2837988826815642,
"acc_stderr": 0.015078358970751764,
"acc_norm": 0.2837988826815642,
"acc_norm_stderr": 0.015078358970751764
},
"hendrycksTest-nutrition": {
"acc": 0.48366013071895425,
"acc_stderr": 0.028614624752805413,
"acc_norm": 0.48366013071895425,
"acc_norm_stderr": 0.028614624752805413
},
"hendrycksTest-philosophy": {
"acc": 0.5562700964630225,
"acc_stderr": 0.02821768355665231,
"acc_norm": 0.5562700964630225,
"acc_norm_stderr": 0.02821768355665231
},
"hendrycksTest-prehistory": {
"acc": 0.5370370370370371,
"acc_stderr": 0.027744313443376536,
"acc_norm": 0.5370370370370371,
"acc_norm_stderr": 0.027744313443376536
},
"hendrycksTest-professional_accounting": {
"acc": 0.36524822695035464,
"acc_stderr": 0.028723863853281285,
"acc_norm": 0.36524822695035464,
"acc_norm_stderr": 0.028723863853281285
},
"hendrycksTest-professional_law": {
"acc": 0.37614080834419816,
"acc_stderr": 0.012372214430599816,
"acc_norm": 0.37614080834419816,
"acc_norm_stderr": 0.012372214430599816
},
"hendrycksTest-professional_medicine": {
"acc": 0.44485294117647056,
"acc_stderr": 0.030187532060329387,
"acc_norm": 0.44485294117647056,
"acc_norm_stderr": 0.030187532060329387
},
"hendrycksTest-professional_psychology": {
"acc": 0.4411764705882353,
"acc_stderr": 0.020087362076702857,
"acc_norm": 0.4411764705882353,
"acc_norm_stderr": 0.020087362076702857
},
"hendrycksTest-public_relations": {
"acc": 0.509090909090909,
"acc_stderr": 0.0478833976870286,
"acc_norm": 0.509090909090909,
"acc_norm_stderr": 0.0478833976870286
},
"hendrycksTest-security_studies": {
"acc": 0.5224489795918368,
"acc_stderr": 0.031976941187136725,
"acc_norm": 0.5224489795918368,
"acc_norm_stderr": 0.031976941187136725
},
"hendrycksTest-sociology": {
"acc": 0.6119402985074627,
"acc_stderr": 0.0344578996436275,
"acc_norm": 0.6119402985074627,
"acc_norm_stderr": 0.0344578996436275
},
"hendrycksTest-us_foreign_policy": {
"acc": 0.73,
"acc_stderr": 0.0446196043338474,
"acc_norm": 0.73,
"acc_norm_stderr": 0.0446196043338474
},
"hendrycksTest-virology": {
"acc": 0.39759036144578314,
"acc_stderr": 0.038099730845402184,
"acc_norm": 0.39759036144578314,
"acc_norm_stderr": 0.038099730845402184
},
"hendrycksTest-world_religions": {
"acc": 0.7076023391812866,
"acc_stderr": 0.03488647713457922,
"acc_norm": 0.7076023391812866,
"acc_norm_stderr": 0.03488647713457922
}
},
"versions": {
"hendrycksTest-abstract_algebra": 1,
"hendrycksTest-anatomy": 1,
"hendrycksTest-astronomy": 1,
"hendrycksTest-business_ethics": 1,
"hendrycksTest-clinical_knowledge": 1,
"hendrycksTest-college_biology": 1,
"hendrycksTest-college_chemistry": 1,
"hendrycksTest-college_computer_science": 1,
"hendrycksTest-college_mathematics": 1,
"hendrycksTest-college_medicine": 1,
"hendrycksTest-college_physics": 1,
"hendrycksTest-computer_security": 1,
"hendrycksTest-conceptual_physics": 1,
"hendrycksTest-econometrics": 1,
"hendrycksTest-electrical_engineering": 1,
"hendrycksTest-elementary_mathematics": 1,
"hendrycksTest-formal_logic": 1,
"hendrycksTest-global_facts": 1,
"hendrycksTest-high_school_biology": 1,
"hendrycksTest-high_school_chemistry": 1,
"hendrycksTest-high_school_computer_science": 1,
"hendrycksTest-high_school_european_history": 1,
"hendrycksTest-high_school_geography": 1,
"hendrycksTest-high_school_government_and_politics": 1,
"hendrycksTest-high_school_macroeconomics": 1,
"hendrycksTest-high_school_mathematics": 1,
"hendrycksTest-high_school_microeconomics": 1,
"hendrycksTest-high_school_physics": 1,
"hendrycksTest-high_school_psychology": 1,
"hendrycksTest-high_school_statistics": 1,
"hendrycksTest-high_school_us_history": 1,
"hendrycksTest-high_school_world_history": 1,
"hendrycksTest-human_aging": 1,
"hendrycksTest-human_sexuality": 1,
"hendrycksTest-international_law": 1,
"hendrycksTest-jurisprudence": 1,
"hendrycksTest-logical_fallacies": 1,
"hendrycksTest-machine_learning": 1,
"hendrycksTest-management": 1,
"hendrycksTest-marketing": 1,
"hendrycksTest-medical_genetics": 1,
"hendrycksTest-miscellaneous": 1,
"hendrycksTest-moral_disputes": 1,
"hendrycksTest-moral_scenarios": 1,
"hendrycksTest-nutrition": 1,
"hendrycksTest-philosophy": 1,
"hendrycksTest-prehistory": 1,
"hendrycksTest-professional_accounting": 1,
"hendrycksTest-professional_law": 1,
"hendrycksTest-professional_medicine": 1,
"hendrycksTest-professional_psychology": 1,
"hendrycksTest-public_relations": 1,
"hendrycksTest-security_studies": 1,
"hendrycksTest-sociology": 1,
"hendrycksTest-us_foreign_policy": 1,
"hendrycksTest-virology": 1,
"hendrycksTest-world_religions": 1
},
"config": {
"model": "hf",
"model_args": "pretrained=/nm/drive1/shubhra/cerebras/experiments/spft-cerebras_llama2_sparse50_45B_platypus_dolphin_KDFalse_GCTrue_LR1e-4_E2/combined/,trust_remote_code=True,dtype=bfloat16",
"num_fewshot": 5,
"batch_size": "16",
"batch_sizes": [],
"device": "cuda:0",
"no_cache": true,
"limit": null,
"bootstrap_iters": 100000,
"description_dict": {}
}
}
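
Below is a minimal sketch (not part of the original results file) of how per-task scores like the ones above can be aggregated into a single macro-average accuracy. It assumes the JSON has been saved to a file named results.json (a hypothetical name) and uses only the Python standard library; it simply averages the "acc" field across all entries under "results".

# Hedged example: aggregate per-task accuracies from an lm-evaluation-harness
# results file into an unweighted macro-average.
import json
from statistics import mean

with open("results.json") as f:   # hypothetical filename for the JSON above
    data = json.load(f)

# Collect the raw accuracy of every task under "results".
accs = [task["acc"] for task in data["results"].values()]

print(f"tasks evaluated: {len(accs)}")
print(f"macro-average acc: {mean(accs):.4f}")

Note that this is an unweighted average over tasks; it does not weight tasks by their number of questions, so it may differ slightly from a micro-averaged (per-question) MMLU score.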