{
  "results": {
    "hendrycksTest-abstract_algebra": {
      "acc": 0.28,
      "acc_stderr": 0.04512608598542129,
      "acc_norm": 0.28,
      "acc_norm_stderr": 0.04512608598542129
    },
    "hendrycksTest-anatomy": {
      "acc": 0.45925925925925926,
      "acc_stderr": 0.04304979692464242,
      "acc_norm": 0.45925925925925926,
      "acc_norm_stderr": 0.04304979692464242
    },
    "hendrycksTest-astronomy": {
      "acc": 0.47368421052631576,
      "acc_stderr": 0.040633027314866725,
      "acc_norm": 0.47368421052631576,
      "acc_norm_stderr": 0.040633027314866725
    },
    "hendrycksTest-business_ethics": {
      "acc": 0.45,
      "acc_stderr": 0.05,
      "acc_norm": 0.45,
      "acc_norm_stderr": 0.05
    },
    "hendrycksTest-clinical_knowledge": {
      "acc": 0.4339622641509434,
      "acc_stderr": 0.030503292013342596,
      "acc_norm": 0.4339622641509434,
      "acc_norm_stderr": 0.030503292013342596
    },
    "hendrycksTest-college_biology": {
      "acc": 0.3680555555555556,
      "acc_stderr": 0.040329990539607195,
      "acc_norm": 0.3680555555555556,
      "acc_norm_stderr": 0.040329990539607195
    },
    "hendrycksTest-college_chemistry": {
      "acc": 0.33,
      "acc_stderr": 0.04725815626252605,
      "acc_norm": 0.33,
      "acc_norm_stderr": 0.04725815626252605
    },
    "hendrycksTest-college_computer_science": {
      "acc": 0.35,
      "acc_stderr": 0.047937248544110196,
      "acc_norm": 0.35,
      "acc_norm_stderr": 0.047937248544110196
    },
    "hendrycksTest-college_mathematics": {
      "acc": 0.2,
      "acc_stderr": 0.040201512610368466,
      "acc_norm": 0.2,
      "acc_norm_stderr": 0.040201512610368466
    },
    "hendrycksTest-college_medicine": {
      "acc": 0.3930635838150289,
      "acc_stderr": 0.0372424959581773,
      "acc_norm": 0.3930635838150289,
      "acc_norm_stderr": 0.0372424959581773
    },
    "hendrycksTest-college_physics": {
      "acc": 0.18627450980392157,
      "acc_stderr": 0.038739587141493524,
      "acc_norm": 0.18627450980392157,
      "acc_norm_stderr": 0.038739587141493524
    },
    "hendrycksTest-computer_security": {
      "acc": 0.59,
      "acc_stderr": 0.049431107042371025,
      "acc_norm": 0.59,
      "acc_norm_stderr": 0.049431107042371025
    },
    "hendrycksTest-conceptual_physics": {
      "acc": 0.37872340425531914,
      "acc_stderr": 0.03170995606040655,
      "acc_norm": 0.37872340425531914,
      "acc_norm_stderr": 0.03170995606040655
    },
    "hendrycksTest-econometrics": {
      "acc": 0.2894736842105263,
      "acc_stderr": 0.04266339443159394,
      "acc_norm": 0.2894736842105263,
      "acc_norm_stderr": 0.04266339443159394
    },
    "hendrycksTest-electrical_engineering": {
      "acc": 0.4206896551724138,
      "acc_stderr": 0.0411391498118926,
      "acc_norm": 0.4206896551724138,
      "acc_norm_stderr": 0.0411391498118926
    },
    "hendrycksTest-elementary_mathematics": {
      "acc": 0.30158730158730157,
      "acc_stderr": 0.02363697599610179,
      "acc_norm": 0.30158730158730157,
      "acc_norm_stderr": 0.02363697599610179
    },
    "hendrycksTest-formal_logic": {
      "acc": 0.25396825396825395,
      "acc_stderr": 0.03893259610604674,
      "acc_norm": 0.25396825396825395,
      "acc_norm_stderr": 0.03893259610604674
    },
    "hendrycksTest-global_facts": {
      "acc": 0.41,
      "acc_stderr": 0.049431107042371025,
      "acc_norm": 0.41,
      "acc_norm_stderr": 0.049431107042371025
    },
    "hendrycksTest-high_school_biology": {
      "acc": 0.4806451612903226,
      "acc_stderr": 0.02842268740431211,
      "acc_norm": 0.4806451612903226,
      "acc_norm_stderr": 0.02842268740431211
    },
    "hendrycksTest-high_school_chemistry": {
      "acc": 0.33004926108374383,
      "acc_stderr": 0.033085304262282574,
      "acc_norm": 0.33004926108374383,
      "acc_norm_stderr": 0.033085304262282574
    },
    "hendrycksTest-high_school_computer_science": {
      "acc": 0.4,
      "acc_stderr": 0.049236596391733084,
      "acc_norm": 0.4,
      "acc_norm_stderr": 0.049236596391733084
    },
    "hendrycksTest-high_school_european_history": {
      "acc": 0.5151515151515151,
      "acc_stderr": 0.03902551007374448,
      "acc_norm": 0.5151515151515151,
      "acc_norm_stderr": 0.03902551007374448
    },
    "hendrycksTest-high_school_geography": {
      "acc": 0.4444444444444444,
      "acc_stderr": 0.035402943770953675,
      "acc_norm": 0.4444444444444444,
      "acc_norm_stderr": 0.035402943770953675
    },
    "hendrycksTest-high_school_government_and_politics": {
      "acc": 0.5906735751295337,
      "acc_stderr": 0.03548608168860806,
      "acc_norm": 0.5906735751295337,
      "acc_norm_stderr": 0.03548608168860806
    },
    "hendrycksTest-high_school_macroeconomics": {
      "acc": 0.36666666666666664,
      "acc_stderr": 0.024433016466052455,
      "acc_norm": 0.36666666666666664,
      "acc_norm_stderr": 0.024433016466052455
    },
    "hendrycksTest-high_school_mathematics": {
      "acc": 0.27037037037037037,
      "acc_stderr": 0.02708037281514568,
      "acc_norm": 0.27037037037037037,
      "acc_norm_stderr": 0.02708037281514568
    },
    "hendrycksTest-high_school_microeconomics": {
      "acc": 0.38235294117647056,
      "acc_stderr": 0.03156663099215416,
      "acc_norm": 0.38235294117647056,
      "acc_norm_stderr": 0.03156663099215416
    },
    "hendrycksTest-high_school_physics": {
      "acc": 0.32450331125827814,
      "acc_stderr": 0.03822746937658754,
      "acc_norm": 0.32450331125827814,
      "acc_norm_stderr": 0.03822746937658754
    },
    "hendrycksTest-high_school_psychology": {
      "acc": 0.5027522935779817,
      "acc_stderr": 0.021436998359765317,
      "acc_norm": 0.5027522935779817,
      "acc_norm_stderr": 0.021436998359765317
    },
    "hendrycksTest-high_school_statistics": {
      "acc": 0.33796296296296297,
      "acc_stderr": 0.03225941352631295,
      "acc_norm": 0.33796296296296297,
      "acc_norm_stderr": 0.03225941352631295
    },
    "hendrycksTest-high_school_us_history": {
      "acc": 0.5637254901960784,
      "acc_stderr": 0.034806931384570396,
      "acc_norm": 0.5637254901960784,
      "acc_norm_stderr": 0.034806931384570396
    },
    "hendrycksTest-high_school_world_history": {
      "acc": 0.6329113924050633,
      "acc_stderr": 0.03137624072561619,
      "acc_norm": 0.6329113924050633,
      "acc_norm_stderr": 0.03137624072561619
    },
    "hendrycksTest-human_aging": {
      "acc": 0.4439461883408072,
      "acc_stderr": 0.03334625674242728,
      "acc_norm": 0.4439461883408072,
      "acc_norm_stderr": 0.03334625674242728
    },
    "hendrycksTest-human_sexuality": {
      "acc": 0.4351145038167939,
      "acc_stderr": 0.04348208051644858,
      "acc_norm": 0.4351145038167939,
      "acc_norm_stderr": 0.04348208051644858
    },
    "hendrycksTest-international_law": {
      "acc": 0.5619834710743802,
      "acc_stderr": 0.045291468044357915,
      "acc_norm": 0.5619834710743802,
      "acc_norm_stderr": 0.045291468044357915
    },
    "hendrycksTest-jurisprudence": {
      "acc": 0.48148148148148145,
      "acc_stderr": 0.04830366024635331,
      "acc_norm": 0.48148148148148145,
      "acc_norm_stderr": 0.04830366024635331
    },
    "hendrycksTest-logical_fallacies": {
      "acc": 0.4539877300613497,
      "acc_stderr": 0.0391170190467718,
      "acc_norm": 0.4539877300613497,
      "acc_norm_stderr": 0.0391170190467718
    },
    "hendrycksTest-machine_learning": {
      "acc": 0.2857142857142857,
      "acc_stderr": 0.04287858751340456,
      "acc_norm": 0.2857142857142857,
      "acc_norm_stderr": 0.04287858751340456
    },
    "hendrycksTest-management": {
      "acc": 0.49514563106796117,
      "acc_stderr": 0.049505043821289195,
      "acc_norm": 0.49514563106796117,
      "acc_norm_stderr": 0.049505043821289195
    },
    "hendrycksTest-marketing": {
      "acc": 0.594017094017094,
      "acc_stderr": 0.03217180182641087,
      "acc_norm": 0.594017094017094,
      "acc_norm_stderr": 0.03217180182641087
    },
    "hendrycksTest-medical_genetics": {
      "acc": 0.44,
      "acc_stderr": 0.04988876515698589,
      "acc_norm": 0.44,
      "acc_norm_stderr": 0.04988876515698589
    },
    "hendrycksTest-miscellaneous": {
      "acc": 0.5530012771392082,
      "acc_stderr": 0.017779225233394223,
      "acc_norm": 0.5530012771392082,
      "acc_norm_stderr": 0.017779225233394223
    },
    "hendrycksTest-moral_disputes": {
      "acc": 0.4479768786127168,
      "acc_stderr": 0.026772990653361816,
      "acc_norm": 0.4479768786127168,
      "acc_norm_stderr": 0.026772990653361816
    },
    "hendrycksTest-moral_scenarios": {
      "acc": 0.2558659217877095,
      "acc_stderr": 0.014593620923210728,
      "acc_norm": 0.2558659217877095,
      "acc_norm_stderr": 0.014593620923210728
    },
    "hendrycksTest-nutrition": {
      "acc": 0.49019607843137253,
      "acc_stderr": 0.02862441255016795,
      "acc_norm": 0.49019607843137253,
      "acc_norm_stderr": 0.02862441255016795
    },
    "hendrycksTest-philosophy": {
      "acc": 0.5401929260450161,
      "acc_stderr": 0.028306190403305696,
      "acc_norm": 0.5401929260450161,
      "acc_norm_stderr": 0.028306190403305696
    },
    "hendrycksTest-prehistory": {
      "acc": 0.49074074074074076,
      "acc_stderr": 0.027815973433878014,
      "acc_norm": 0.49074074074074076,
      "acc_norm_stderr": 0.027815973433878014
    },
    "hendrycksTest-professional_accounting": {
      "acc": 0.31560283687943264,
      "acc_stderr": 0.02772498944950931,
      "acc_norm": 0.31560283687943264,
      "acc_norm_stderr": 0.02772498944950931
    },
    "hendrycksTest-professional_law": {
      "acc": 0.3409387222946545,
      "acc_stderr": 0.01210681720306721,
      "acc_norm": 0.3409387222946545,
      "acc_norm_stderr": 0.01210681720306721
    },
    "hendrycksTest-professional_medicine": {
      "acc": 0.39338235294117646,
      "acc_stderr": 0.029674288281311172,
      "acc_norm": 0.39338235294117646,
      "acc_norm_stderr": 0.029674288281311172
    },
    "hendrycksTest-professional_psychology": {
      "acc": 0.4215686274509804,
      "acc_stderr": 0.01997742260022747,
      "acc_norm": 0.4215686274509804,
      "acc_norm_stderr": 0.01997742260022747
    },
    "hendrycksTest-public_relations": {
      "acc": 0.42727272727272725,
      "acc_stderr": 0.04738198703545483,
      "acc_norm": 0.42727272727272725,
      "acc_norm_stderr": 0.04738198703545483
    },
    "hendrycksTest-security_studies": {
      "acc": 0.43673469387755104,
      "acc_stderr": 0.031751952375833226,
      "acc_norm": 0.43673469387755104,
      "acc_norm_stderr": 0.031751952375833226
    },
    "hendrycksTest-sociology": {
      "acc": 0.5572139303482587,
      "acc_stderr": 0.03512310964123936,
      "acc_norm": 0.5572139303482587,
      "acc_norm_stderr": 0.03512310964123936
    },
    "hendrycksTest-us_foreign_policy": {
      "acc": 0.6,
      "acc_stderr": 0.049236596391733084,
      "acc_norm": 0.6,
      "acc_norm_stderr": 0.049236596391733084
    },
    "hendrycksTest-virology": {
      "acc": 0.3855421686746988,
      "acc_stderr": 0.037891344246115496,
      "acc_norm": 0.3855421686746988,
      "acc_norm_stderr": 0.037891344246115496
    },
    "hendrycksTest-world_religions": {
      "acc": 0.5672514619883041,
      "acc_stderr": 0.03799978644370607,
      "acc_norm": 0.5672514619883041,
      "acc_norm_stderr": 0.03799978644370607
    }
  },
  "versions": {
    "hendrycksTest-abstract_algebra": 1,
    "hendrycksTest-anatomy": 1,
    "hendrycksTest-astronomy": 1,
    "hendrycksTest-business_ethics": 1,
    "hendrycksTest-clinical_knowledge": 1,
    "hendrycksTest-college_biology": 1,
    "hendrycksTest-college_chemistry": 1,
    "hendrycksTest-college_computer_science": 1,
    "hendrycksTest-college_mathematics": 1,
    "hendrycksTest-college_medicine": 1,
    "hendrycksTest-college_physics": 1,
    "hendrycksTest-computer_security": 1,
    "hendrycksTest-conceptual_physics": 1,
    "hendrycksTest-econometrics": 1,
    "hendrycksTest-electrical_engineering": 1,
    "hendrycksTest-elementary_mathematics": 1,
    "hendrycksTest-formal_logic": 1,
    "hendrycksTest-global_facts": 1,
    "hendrycksTest-high_school_biology": 1,
    "hendrycksTest-high_school_chemistry": 1,
    "hendrycksTest-high_school_computer_science": 1,
    "hendrycksTest-high_school_european_history": 1,
    "hendrycksTest-high_school_geography": 1,
    "hendrycksTest-high_school_government_and_politics": 1,
    "hendrycksTest-high_school_macroeconomics": 1,
    "hendrycksTest-high_school_mathematics": 1,
    "hendrycksTest-high_school_microeconomics": 1,
    "hendrycksTest-high_school_physics": 1,
    "hendrycksTest-high_school_psychology": 1,
    "hendrycksTest-high_school_statistics": 1,
    "hendrycksTest-high_school_us_history": 1,
    "hendrycksTest-high_school_world_history": 1,
    "hendrycksTest-human_aging": 1,
    "hendrycksTest-human_sexuality": 1,
    "hendrycksTest-international_law": 1,
    "hendrycksTest-jurisprudence": 1,
    "hendrycksTest-logical_fallacies": 1,
    "hendrycksTest-machine_learning": 1,
    "hendrycksTest-management": 1,
    "hendrycksTest-marketing": 1,
    "hendrycksTest-medical_genetics": 1,
    "hendrycksTest-miscellaneous": 1,
    "hendrycksTest-moral_disputes": 1,
    "hendrycksTest-moral_scenarios": 1,
    "hendrycksTest-nutrition": 1,
    "hendrycksTest-philosophy": 1,
    "hendrycksTest-prehistory": 1,
    "hendrycksTest-professional_accounting": 1,
    "hendrycksTest-professional_law": 1,
    "hendrycksTest-professional_medicine": 1,
    "hendrycksTest-professional_psychology": 1,
    "hendrycksTest-public_relations": 1,
    "hendrycksTest-security_studies": 1,
    "hendrycksTest-sociology": 1,
    "hendrycksTest-us_foreign_policy": 1,
    "hendrycksTest-virology": 1,
    "hendrycksTest-world_religions": 1
  },
  "config": {
    "model": "sparseml",
    "model_args": "pretrained=/cerebras/experiments/spft-retrained_sparse70_llama2_DATAUpdated_KDFalse_GCTrue_LR1e-4_E6-/combined/,trust_remote_code=True",
    "num_fewshot": 5,
    "batch_size": "6",
    "batch_sizes": [],
    "device": "cuda:0",
    "no_cache": true,
    "limit": null,
    "bootstrap_iters": 100000,
    "description_dict": {}
  }
}