{ "config_general": { "lighteval_sha": "1.4", "num_few_shot_default": null, "num_fewshot_seeds": null, "override_batch_size": null, "max_samples": null, "job_id": -1, "start_time": null, "end_time": "2024-05-02-16-52-29", "total_evaluation_time_secondes": "", "model_name": "TheBloke/SOLAR-10.7B-Instruct-v1.0-AWQ", "model_sha": "", "model_dtype": "4bit", "model_size": 5.96, "model_params": 10.55, "quant_type": "AWQ", "precision": "4bit" }, "results": { "harness|arc:challenge|0": { "acc,none": 0.5981228668941979, "acc_stderr,none": 0.014327268614578274, "acc_norm,none": 0.6322525597269625, "acc_norm_stderr,none": 0.014090995618168473, "alias": "arc_challenge" }, "harness|openbookqa|0": { "acc,none": 0.374, "acc_stderr,none": 0.021660710347204487, "acc_norm,none": 0.494, "acc_norm_stderr,none": 0.022381462412439324, "alias": "openbookqa" }, "harness|truthfulqa:mc1|0": { "acc,none": 0.5471236230110159, "acc_stderr,none": 0.01742558984831402, "alias": "truthfulqa_mc1" }, "harness|piqa|0": { "acc,none": 0.8112078346028292, "acc_stderr,none": 0.009130687388952816, "acc_norm,none": 0.8144722524483133, "acc_norm_stderr,none": 0.009069597302603996, "alias": "piqa" }, "harness|boolq|0": { "acc,none": 0.8798165137614679, "acc_stderr,none": 0.005687363587870172, "alias": "boolq" }, "harness|arc:easy|0": { "acc,none": 0.8308080808080808, "acc_stderr,none": 0.007693223639488826, "acc_norm,none": 0.8101851851851852, "acc_norm_stderr,none": 0.008046840527852234, "alias": "arc_easy" }, "harness|lambada:openai|0": { "perplexity,none": 3.185362930040927, "perplexity_stderr,none": 0.07406409479719334, "acc,none": 0.7279254803027363, "acc_stderr,none": 0.006200111064998447, "alias": "lambada_openai" }, "harness|mmlu|0": { "acc,none": 0.6232730380287709, "acc_stderr,none": 0.0038533964574598407, "alias": "mmlu" }, "harness|mmlu_humanities|0": { "alias": " - humanities", "acc,none": 0.5727948990435706, "acc_stderr,none": 0.006732570609347105 }, "harness|mmlu_formal_logic|0": { "alias": " - formal_logic", "acc,none": 0.373015873015873, "acc_stderr,none": 0.04325506042017086 }, "harness|mmlu_high_school_european_history|0": { "alias": " - high_school_european_history", "acc,none": 0.806060606060606, "acc_stderr,none": 0.030874145136562097 }, "harness|mmlu_high_school_us_history|0": { "alias": " - high_school_us_history", "acc,none": 0.8333333333333334, "acc_stderr,none": 0.026156867523931055 }, "harness|mmlu_high_school_world_history|0": { "alias": " - high_school_world_history", "acc,none": 0.8270042194092827, "acc_stderr,none": 0.024621562866768445 }, "harness|mmlu_international_law|0": { "alias": " - international_law", "acc,none": 0.8264462809917356, "acc_stderr,none": 0.0345727283691767 }, "harness|mmlu_jurisprudence|0": { "alias": " - jurisprudence", "acc,none": 0.7685185185185185, "acc_stderr,none": 0.04077494709252626 }, "harness|mmlu_logical_fallacies|0": { "alias": " - logical_fallacies", "acc,none": 0.6993865030674846, "acc_stderr,none": 0.03602511318806771 }, "harness|mmlu_moral_disputes|0": { "alias": " - moral_disputes", "acc,none": 0.7023121387283237, "acc_stderr,none": 0.024617055388676982 }, "harness|mmlu_moral_scenarios|0": { "alias": " - moral_scenarios", "acc,none": 0.3307262569832402, "acc_stderr,none": 0.01573502625896612 }, "harness|mmlu_philosophy|0": { "alias": " - philosophy", "acc,none": 0.6945337620578779, "acc_stderr,none": 0.026160584450140453 }, "harness|mmlu_prehistory|0": { "alias": " - prehistory", "acc,none": 0.7438271604938271, "acc_stderr,none": 0.0242885336377261 }, 
"harness|mmlu_professional_law|0": { "alias": " - professional_law", "acc,none": 0.47392438070404175, "acc_stderr,none": 0.012752858346533134 }, "harness|mmlu_world_religions|0": { "alias": " - world_religions", "acc,none": 0.7543859649122807, "acc_stderr,none": 0.033014059469872487 }, "harness|mmlu_other|0": { "alias": " - other", "acc,none": 0.7016414547795301, "acc_stderr,none": 0.007934723097613417 }, "harness|mmlu_business_ethics|0": { "alias": " - business_ethics", "acc,none": 0.66, "acc_stderr,none": 0.04760952285695237 }, "harness|mmlu_clinical_knowledge|0": { "alias": " - clinical_knowledge", "acc,none": 0.6867924528301886, "acc_stderr,none": 0.028544793319055326 }, "harness|mmlu_college_medicine|0": { "alias": " - college_medicine", "acc,none": 0.6473988439306358, "acc_stderr,none": 0.036430371689585475 }, "harness|mmlu_global_facts|0": { "alias": " - global_facts", "acc,none": 0.36, "acc_stderr,none": 0.04824181513244218 }, "harness|mmlu_human_aging|0": { "alias": " - human_aging", "acc,none": 0.695067264573991, "acc_stderr,none": 0.030898610882477515 }, "harness|mmlu_management|0": { "alias": " - management", "acc,none": 0.8155339805825242, "acc_stderr,none": 0.03840423627288276 }, "harness|mmlu_marketing|0": { "alias": " - marketing", "acc,none": 0.8547008547008547, "acc_stderr,none": 0.023086635086841407 }, "harness|mmlu_medical_genetics|0": { "alias": " - medical_genetics", "acc,none": 0.73, "acc_stderr,none": 0.044619604333847394 }, "harness|mmlu_miscellaneous|0": { "alias": " - miscellaneous", "acc,none": 0.80970625798212, "acc_stderr,none": 0.01403694585038138 }, "harness|mmlu_nutrition|0": { "alias": " - nutrition", "acc,none": 0.7156862745098039, "acc_stderr,none": 0.025829163272757468 }, "harness|mmlu_professional_accounting|0": { "alias": " - professional_accounting", "acc,none": 0.5035460992907801, "acc_stderr,none": 0.02982674915328092 }, "harness|mmlu_professional_medicine|0": { "alias": " - professional_medicine", "acc,none": 0.7022058823529411, "acc_stderr,none": 0.027778298701545443 }, "harness|mmlu_virology|0": { "alias": " - virology", "acc,none": 0.5180722891566265, "acc_stderr,none": 0.038899512528272166 }, "harness|mmlu_social_sciences|0": { "alias": " - social_sciences", "acc,none": 0.7237569060773481, "acc_stderr,none": 0.007875459074235897 }, "harness|mmlu_econometrics|0": { "alias": " - econometrics", "acc,none": 0.47368421052631576, "acc_stderr,none": 0.046970851366478626 }, "harness|mmlu_high_school_geography|0": { "alias": " - high_school_geography", "acc,none": 0.803030303030303, "acc_stderr,none": 0.02833560973246336 }, "harness|mmlu_high_school_government_and_politics|0": { "alias": " - high_school_government_and_politics", "acc,none": 0.8808290155440415, "acc_stderr,none": 0.02338193534812142 }, "harness|mmlu_high_school_macroeconomics|0": { "alias": " - high_school_macroeconomics", "acc,none": 0.6358974358974359, "acc_stderr,none": 0.024396672985094767 }, "harness|mmlu_high_school_microeconomics|0": { "alias": " - high_school_microeconomics", "acc,none": 0.6764705882352942, "acc_stderr,none": 0.03038835355188678 }, "harness|mmlu_high_school_psychology|0": { "alias": " - high_school_psychology", "acc,none": 0.8201834862385321, "acc_stderr,none": 0.016465345467391534 }, "harness|mmlu_human_sexuality|0": { "alias": " - human_sexuality", "acc,none": 0.7480916030534351, "acc_stderr,none": 0.038073871163060866 }, "harness|mmlu_professional_psychology|0": { "alias": " - professional_psychology", "acc,none": 0.6519607843137255, "acc_stderr,none": 
0.01927099870822398 }, "harness|mmlu_public_relations|0": { "alias": " - public_relations", "acc,none": 0.6272727272727273, "acc_stderr,none": 0.04631381319425465 }, "harness|mmlu_security_studies|0": { "alias": " - security_studies", "acc,none": 0.6938775510204082, "acc_stderr,none": 0.02950489645459596 }, "harness|mmlu_sociology|0": { "alias": " - sociology", "acc,none": 0.8208955223880597, "acc_stderr,none": 0.027113286753111837 }, "harness|mmlu_us_foreign_policy|0": { "alias": " - us_foreign_policy", "acc,none": 0.87, "acc_stderr,none": 0.03379976689896309 }, "harness|mmlu_stem|0": { "alias": " - stem", "acc,none": 0.5233111322549953, "acc_stderr,none": 0.008565620869325358 }, "harness|mmlu_abstract_algebra|0": { "alias": " - abstract_algebra", "acc,none": 0.35, "acc_stderr,none": 0.0479372485441102 }, "harness|mmlu_anatomy|0": { "alias": " - anatomy", "acc,none": 0.5851851851851851, "acc_stderr,none": 0.04256193767901408 }, "harness|mmlu_astronomy|0": { "alias": " - astronomy", "acc,none": 0.7039473684210527, "acc_stderr,none": 0.037150621549989056 }, "harness|mmlu_college_biology|0": { "alias": " - college_biology", "acc,none": 0.7291666666666666, "acc_stderr,none": 0.037161774375660164 }, "harness|mmlu_college_chemistry|0": { "alias": " - college_chemistry", "acc,none": 0.4, "acc_stderr,none": 0.04923659639173309 }, "harness|mmlu_college_computer_science|0": { "alias": " - college_computer_science", "acc,none": 0.5, "acc_stderr,none": 0.050251890762960605 }, "harness|mmlu_college_mathematics|0": { "alias": " - college_mathematics", "acc,none": 0.29, "acc_stderr,none": 0.04560480215720684 }, "harness|mmlu_college_physics|0": { "alias": " - college_physics", "acc,none": 0.38235294117647056, "acc_stderr,none": 0.04835503696107224 }, "harness|mmlu_computer_security|0": { "alias": " - computer_security", "acc,none": 0.71, "acc_stderr,none": 0.045604802157206845 }, "harness|mmlu_conceptual_physics|0": { "alias": " - conceptual_physics", "acc,none": 0.5617021276595745, "acc_stderr,none": 0.03243618636108101 }, "harness|mmlu_electrical_engineering|0": { "alias": " - electrical_engineering", "acc,none": 0.5379310344827586, "acc_stderr,none": 0.04154659671707548 }, "harness|mmlu_elementary_mathematics|0": { "alias": " - elementary_mathematics", "acc,none": 0.4470899470899471, "acc_stderr,none": 0.025606723995777025 }, "harness|mmlu_high_school_biology|0": { "alias": " - high_school_biology", "acc,none": 0.7677419354838709, "acc_stderr,none": 0.02402225613030824 }, "harness|mmlu_high_school_chemistry|0": { "alias": " - high_school_chemistry", "acc,none": 0.458128078817734, "acc_stderr,none": 0.03505630140785741 }, "harness|mmlu_high_school_computer_science|0": { "alias": " - high_school_computer_science", "acc,none": 0.62, "acc_stderr,none": 0.048783173121456316 }, "harness|mmlu_high_school_mathematics|0": { "alias": " - high_school_mathematics", "acc,none": 0.362962962962963, "acc_stderr,none": 0.02931820364520686 }, "harness|mmlu_high_school_physics|0": { "alias": " - high_school_physics", "acc,none": 0.3509933774834437, "acc_stderr,none": 0.03896981964257374 }, "harness|mmlu_high_school_statistics|0": { "alias": " - high_school_statistics", "acc,none": 0.5370370370370371, "acc_stderr,none": 0.03400603625538271 }, "harness|mmlu_machine_learning|0": { "alias": " - machine_learning", "acc,none": 0.5, "acc_stderr,none": 0.04745789978762494 }, "harness|truthfulqa:mc2|0": { "acc,none": 0.7003691298306434, "acc_stderr,none": 0.015084794104413395, "alias": "truthfulqa_mc2" }, 
"harness|winogrande|0": { "acc,none": 0.7458563535911602, "acc_stderr,none": 0.012236307219708278, "alias": "winogrande" }, "harness|hellaswag|0": { "acc,none": 0.6806413065126469, "acc_stderr,none": 0.004652753439460115, "acc_norm,none": 0.8593905596494722, "acc_norm_stderr,none": 0.0034690778470563856, "alias": "hellaswag" } }, "task_info": { "model": "TheBloke/SOLAR-10.7B-Instruct-v1.0-AWQ", "revision": "main", "private": false, "params": 6.652, "architectures": "LlamaForCausalLM", "quant_type": "AWQ", "precision": "4bit", "model_params": 13.304, "model_size": 6.652, "weight_dtype": "int4", "compute_dtype": "float16", "gguf_ftype": "*Q4_0.gguf", "hardware": "gpu", "status": "Pending", "submitted_time": "2024-05-01T16:10:00Z", "model_type": "quantization", "job_id": -1, "job_start_time": null, "scripts": "ITREX" }, "quantization_config": { "bits": 4, "group_size": 128, "quant_method": "awq", "version": "gemm", "zero_point": true }, "versions": { "harness|arc:challenge|0": 1.0, "harness|openbookqa|0": 1.0, "harness|truthfulqa:mc1|0": 2.0, "harness|piqa|0": 1.0, "harness|boolq|0": 2.0, "harness|arc:easy|0": 1.0, "harness|lambada:openai|0": 1.0, "harness|mmlu|0": null, "harness|mmlu_humanities|0": null, "harness|mmlu_formal_logic|0": 0.0, "harness|mmlu_high_school_european_history|0": 0.0, "harness|mmlu_high_school_us_history|0": 0.0, "harness|mmlu_high_school_world_history|0": 0.0, "harness|mmlu_international_law|0": 0.0, "harness|mmlu_jurisprudence|0": 0.0, "harness|mmlu_logical_fallacies|0": 0.0, "harness|mmlu_moral_disputes|0": 0.0, "harness|mmlu_moral_scenarios|0": 0.0, "harness|mmlu_philosophy|0": 0.0, "harness|mmlu_prehistory|0": 0.0, "harness|mmlu_professional_law|0": 0.0, "harness|mmlu_world_religions|0": 0.0, "harness|mmlu_other|0": null, "harness|mmlu_business_ethics|0": 0.0, "harness|mmlu_clinical_knowledge|0": 0.0, "harness|mmlu_college_medicine|0": 0.0, "harness|mmlu_global_facts|0": 0.0, "harness|mmlu_human_aging|0": 0.0, "harness|mmlu_management|0": 0.0, "harness|mmlu_marketing|0": 0.0, "harness|mmlu_medical_genetics|0": 0.0, "harness|mmlu_miscellaneous|0": 0.0, "harness|mmlu_nutrition|0": 0.0, "harness|mmlu_professional_accounting|0": 0.0, "harness|mmlu_professional_medicine|0": 0.0, "harness|mmlu_virology|0": 0.0, "harness|mmlu_social_sciences|0": null, "harness|mmlu_econometrics|0": 0.0, "harness|mmlu_high_school_geography|0": 0.0, "harness|mmlu_high_school_government_and_politics|0": 0.0, "harness|mmlu_high_school_macroeconomics|0": 0.0, "harness|mmlu_high_school_microeconomics|0": 0.0, "harness|mmlu_high_school_psychology|0": 0.0, "harness|mmlu_human_sexuality|0": 0.0, "harness|mmlu_professional_psychology|0": 0.0, "harness|mmlu_public_relations|0": 0.0, "harness|mmlu_security_studies|0": 0.0, "harness|mmlu_sociology|0": 0.0, "harness|mmlu_us_foreign_policy|0": 0.0, "harness|mmlu_stem|0": null, "harness|mmlu_abstract_algebra|0": 0.0, "harness|mmlu_anatomy|0": 0.0, "harness|mmlu_astronomy|0": 0.0, "harness|mmlu_college_biology|0": 0.0, "harness|mmlu_college_chemistry|0": 0.0, "harness|mmlu_college_computer_science|0": 0.0, "harness|mmlu_college_mathematics|0": 0.0, "harness|mmlu_college_physics|0": 0.0, "harness|mmlu_computer_security|0": 0.0, "harness|mmlu_conceptual_physics|0": 0.0, "harness|mmlu_electrical_engineering|0": 0.0, "harness|mmlu_elementary_mathematics|0": 0.0, "harness|mmlu_high_school_biology|0": 0.0, "harness|mmlu_high_school_chemistry|0": 0.0, "harness|mmlu_high_school_computer_science|0": 0.0, "harness|mmlu_high_school_mathematics|0": 0.0, 
"harness|mmlu_high_school_physics|0": 0.0, "harness|mmlu_high_school_statistics|0": 0.0, "harness|mmlu_machine_learning|0": 0.0, "harness|truthfulqa:mc2|0": 2.0, "harness|winogrande|0": 1.0, "harness|hellaswag|0": 1.0 }, "n-shot": { "arc_challenge": 0, "arc_easy": 0, "boolq": 0, "hellaswag": 0, "lambada_openai": 0, "mmlu": 0, "mmlu_abstract_algebra": 0, "mmlu_anatomy": 0, "mmlu_astronomy": 0, "mmlu_business_ethics": 0, "mmlu_clinical_knowledge": 0, "mmlu_college_biology": 0, "mmlu_college_chemistry": 0, "mmlu_college_computer_science": 0, "mmlu_college_mathematics": 0, "mmlu_college_medicine": 0, "mmlu_college_physics": 0, "mmlu_computer_security": 0, "mmlu_conceptual_physics": 0, "mmlu_econometrics": 0, "mmlu_electrical_engineering": 0, "mmlu_elementary_mathematics": 0, "mmlu_formal_logic": 0, "mmlu_global_facts": 0, "mmlu_high_school_biology": 0, "mmlu_high_school_chemistry": 0, "mmlu_high_school_computer_science": 0, "mmlu_high_school_european_history": 0, "mmlu_high_school_geography": 0, "mmlu_high_school_government_and_politics": 0, "mmlu_high_school_macroeconomics": 0, "mmlu_high_school_mathematics": 0, "mmlu_high_school_microeconomics": 0, "mmlu_high_school_physics": 0, "mmlu_high_school_psychology": 0, "mmlu_high_school_statistics": 0, "mmlu_high_school_us_history": 0, "mmlu_high_school_world_history": 0, "mmlu_human_aging": 0, "mmlu_human_sexuality": 0, "mmlu_humanities": 0, "mmlu_international_law": 0, "mmlu_jurisprudence": 0, "mmlu_logical_fallacies": 0, "mmlu_machine_learning": 0, "mmlu_management": 0, "mmlu_marketing": 0, "mmlu_medical_genetics": 0, "mmlu_miscellaneous": 0, "mmlu_moral_disputes": 0, "mmlu_moral_scenarios": 0, "mmlu_nutrition": 0, "mmlu_other": 0, "mmlu_philosophy": 0, "mmlu_prehistory": 0, "mmlu_professional_accounting": 0, "mmlu_professional_law": 0, "mmlu_professional_medicine": 0, "mmlu_professional_psychology": 0, "mmlu_public_relations": 0, "mmlu_security_studies": 0, "mmlu_social_sciences": 0, "mmlu_sociology": 0, "mmlu_stem": 0, "mmlu_us_foreign_policy": 0, "mmlu_virology": 0, "mmlu_world_religions": 0, "openbookqa": 0, "piqa": 0, "truthfulqa_mc1": 0, "truthfulqa_mc2": 0, "winogrande": 0 }, "date": 1714605116.6428869, "config": { "model": "hf", "model_args": "pretrained=TheBloke/SOLAR-10.7B-Instruct-v1.0-AWQ,trust_remote_code=True,dtype=float16,_commit_hash=main", "batch_size": 2, "batch_sizes": [], "device": "cuda", "use_cache": null, "limit": null, "bootstrap_iters": 100000, "gen_kwargs": null } }