{ "config_general": { "lighteval_sha": "1.4", "num_few_shot_default": null, "num_fewshot_seeds": null, "override_batch_size": null, "max_samples": null, "job_id": -1, "start_time": null, "end_time": "2024-05-11-21-17-09", "total_evaluation_time_secondes": "", "model_name": "TheBloke/Mistral-7B-Instruct-v0.2-GPTQ", "model_sha": "", "model_dtype": "4bit", "model_size": 4.16, "model_params": 7.04, "quant_type": "GPTQ", "precision": "4bit" }, "results": { "harness|truthfulqa:mc1|0": { "acc,none": 0.5067319461444308, "acc_stderr,none": 0.017501914492655382, "alias": "truthfulqa_mc1" }, "harness|winogrande|0": { "acc,none": 0.7324388318863457, "acc_stderr,none": 0.012441718456893009, "alias": "winogrande" }, "harness|hellaswag|0": { "acc,none": 0.6537542322246565, "acc_stderr,none": 0.004748003276466214, "acc_norm,none": 0.8312089225253934, "acc_norm_stderr,none": 0.003738017734037975, "alias": "hellaswag" }, "harness|boolq|0": { "acc,none": 0.8431192660550458, "acc_stderr,none": 0.0063609481079962785, "alias": "boolq" }, "harness|openbookqa|0": { "acc,none": 0.328, "acc_stderr,none": 0.021017027165175495, "acc_norm,none": 0.438, "acc_norm_stderr,none": 0.022210326363977413, "alias": "openbookqa" }, "harness|mmlu|0": { "acc,none": 0.5804016521862982, "acc_stderr,none": 0.003961191476321839, "alias": "mmlu" }, "harness|mmlu_humanities|0": { "alias": " - humanities", "acc,none": 0.5341126461211477, "acc_stderr,none": 0.0068607425916126594 }, "harness|mmlu_formal_logic|0": { "alias": " - formal_logic", "acc,none": 0.3492063492063492, "acc_stderr,none": 0.04263906892795133 }, "harness|mmlu_high_school_european_history|0": { "alias": " - high_school_european_history", "acc,none": 0.7151515151515152, "acc_stderr,none": 0.03524390844511781 }, "harness|mmlu_high_school_us_history|0": { "alias": " - high_school_us_history", "acc,none": 0.7745098039215687, "acc_stderr,none": 0.02933116229425172 }, "harness|mmlu_high_school_world_history|0": { "alias": " - high_school_world_history", "acc,none": 0.7805907172995781, "acc_stderr,none": 0.026939106581553945 }, "harness|mmlu_international_law|0": { "alias": " - international_law", "acc,none": 0.7355371900826446, "acc_stderr,none": 0.040261875275912046 }, "harness|mmlu_jurisprudence|0": { "alias": " - jurisprudence", "acc,none": 0.7129629629629629, "acc_stderr,none": 0.043733130409147614 }, "harness|mmlu_logical_fallacies|0": { "alias": " - logical_fallacies", "acc,none": 0.7484662576687117, "acc_stderr,none": 0.03408997886857529 }, "harness|mmlu_moral_disputes|0": { "alias": " - moral_disputes", "acc,none": 0.6358381502890174, "acc_stderr,none": 0.025906632631016113 }, "harness|mmlu_moral_scenarios|0": { "alias": " - moral_scenarios", "acc,none": 0.34301675977653634, "acc_stderr,none": 0.015876912673057752 }, "harness|mmlu_philosophy|0": { "alias": " - philosophy", "acc,none": 0.6302250803858521, "acc_stderr,none": 0.027417996705630995 }, "harness|mmlu_prehistory|0": { "alias": " - prehistory", "acc,none": 0.6697530864197531, "acc_stderr,none": 0.026168298456732842 }, "harness|mmlu_professional_law|0": { "alias": " - professional_law", "acc,none": 0.4172099087353325, "acc_stderr,none": 0.012593959992906424 }, "harness|mmlu_world_religions|0": { "alias": " - world_religions", "acc,none": 0.8187134502923976, "acc_stderr,none": 0.029547741687640038 }, "harness|mmlu_other|0": { "alias": " - other", "acc,none": 0.6462825877051819, "acc_stderr,none": 0.008254745930389685 }, "harness|mmlu_business_ethics|0": { "alias": " - business_ethics", "acc,none": 0.58, 
"acc_stderr,none": 0.049604496374885836 }, "harness|mmlu_clinical_knowledge|0": { "alias": " - clinical_knowledge", "acc,none": 0.6830188679245283, "acc_stderr,none": 0.02863723563980089 }, "harness|mmlu_college_medicine|0": { "alias": " - college_medicine", "acc,none": 0.5722543352601156, "acc_stderr,none": 0.03772446857518027 }, "harness|mmlu_global_facts|0": { "alias": " - global_facts", "acc,none": 0.33, "acc_stderr,none": 0.04725815626252606 }, "harness|mmlu_human_aging|0": { "alias": " - human_aging", "acc,none": 0.6053811659192825, "acc_stderr,none": 0.03280400504755291 }, "harness|mmlu_management|0": { "alias": " - management", "acc,none": 0.7475728155339806, "acc_stderr,none": 0.04301250399690878 }, "harness|mmlu_marketing|0": { "alias": " - marketing", "acc,none": 0.8589743589743589, "acc_stderr,none": 0.022801382534597524 }, "harness|mmlu_medical_genetics|0": { "alias": " - medical_genetics", "acc,none": 0.62, "acc_stderr,none": 0.04878317312145632 }, "harness|mmlu_miscellaneous|0": { "alias": " - miscellaneous", "acc,none": 0.7701149425287356, "acc_stderr,none": 0.015046301846691807 }, "harness|mmlu_nutrition|0": { "alias": " - nutrition", "acc,none": 0.6274509803921569, "acc_stderr,none": 0.027684181883302895 }, "harness|mmlu_professional_accounting|0": { "alias": " - professional_accounting", "acc,none": 0.44680851063829785, "acc_stderr,none": 0.029658235097666907 }, "harness|mmlu_professional_medicine|0": { "alias": " - professional_medicine", "acc,none": 0.625, "acc_stderr,none": 0.029408372932278746 }, "harness|mmlu_virology|0": { "alias": " - virology", "acc,none": 0.42771084337349397, "acc_stderr,none": 0.038515976837185335 }, "harness|mmlu_social_sciences|0": { "alias": " - social_sciences", "acc,none": 0.677608059798505, "acc_stderr,none": 0.008221694733283947 }, "harness|mmlu_econometrics|0": { "alias": " - econometrics", "acc,none": 0.39473684210526316, "acc_stderr,none": 0.04598188057816542 }, "harness|mmlu_high_school_geography|0": { "alias": " - high_school_geography", "acc,none": 0.7424242424242424, "acc_stderr,none": 0.031156269519646843 }, "harness|mmlu_high_school_government_and_politics|0": { "alias": " - high_school_government_and_politics", "acc,none": 0.7979274611398963, "acc_stderr,none": 0.02897908979429673 }, "harness|mmlu_high_school_macroeconomics|0": { "alias": " - high_school_macroeconomics", "acc,none": 0.558974358974359, "acc_stderr,none": 0.02517404838400075 }, "harness|mmlu_high_school_microeconomics|0": { "alias": " - high_school_microeconomics", "acc,none": 0.6386554621848739, "acc_stderr,none": 0.031204691225150016 }, "harness|mmlu_high_school_psychology|0": { "alias": " - high_school_psychology", "acc,none": 0.7743119266055046, "acc_stderr,none": 0.017923087667803057 }, "harness|mmlu_human_sexuality|0": { "alias": " - human_sexuality", "acc,none": 0.7022900763358778, "acc_stderr,none": 0.040103589424622034 }, "harness|mmlu_professional_psychology|0": { "alias": " - professional_psychology", "acc,none": 0.5915032679738562, "acc_stderr,none": 0.019886221037501862 }, "harness|mmlu_public_relations|0": { "alias": " - public_relations", "acc,none": 0.6636363636363637, "acc_stderr,none": 0.04525393596302505 }, "harness|mmlu_security_studies|0": { "alias": " - security_studies", "acc,none": 0.7061224489795919, "acc_stderr,none": 0.029162738410249772 }, "harness|mmlu_sociology|0": { "alias": " - sociology", "acc,none": 0.8208955223880597, "acc_stderr,none": 0.027113286753111837 }, "harness|mmlu_us_foreign_policy|0": { "alias": " - 
us_foreign_policy", "acc,none": 0.82, "acc_stderr,none": 0.038612291966536934 }, "harness|mmlu_stem|0": { "alias": " - stem", "acc,none": 0.48969235648588644, "acc_stderr,none": 0.008709716985915076 }, "harness|mmlu_abstract_algebra|0": { "alias": " - abstract_algebra", "acc,none": 0.34, "acc_stderr,none": 0.047609522856952365 }, "harness|mmlu_anatomy|0": { "alias": " - anatomy", "acc,none": 0.5777777777777777, "acc_stderr,none": 0.04266763404099582 }, "harness|mmlu_astronomy|0": { "alias": " - astronomy", "acc,none": 0.6052631578947368, "acc_stderr,none": 0.039777499346220734 }, "harness|mmlu_college_biology|0": { "alias": " - college_biology", "acc,none": 0.6388888888888888, "acc_stderr,none": 0.04016660030451233 }, "harness|mmlu_college_chemistry|0": { "alias": " - college_chemistry", "acc,none": 0.4, "acc_stderr,none": 0.04923659639173309 }, "harness|mmlu_college_computer_science|0": { "alias": " - college_computer_science", "acc,none": 0.54, "acc_stderr,none": 0.05009082659620332 }, "harness|mmlu_college_mathematics|0": { "alias": " - college_mathematics", "acc,none": 0.36, "acc_stderr,none": 0.048241815132442176 }, "harness|mmlu_college_physics|0": { "alias": " - college_physics", "acc,none": 0.43137254901960786, "acc_stderr,none": 0.04928099597287534 }, "harness|mmlu_computer_security|0": { "alias": " - computer_security", "acc,none": 0.66, "acc_stderr,none": 0.04760952285695237 }, "harness|mmlu_conceptual_physics|0": { "alias": " - conceptual_physics", "acc,none": 0.4978723404255319, "acc_stderr,none": 0.03268572658667492 }, "harness|mmlu_electrical_engineering|0": { "alias": " - electrical_engineering", "acc,none": 0.5586206896551724, "acc_stderr,none": 0.04137931034482758 }, "harness|mmlu_elementary_mathematics|0": { "alias": " - elementary_mathematics", "acc,none": 0.3994708994708995, "acc_stderr,none": 0.02522545028406788 }, "harness|mmlu_high_school_biology|0": { "alias": " - high_school_biology", "acc,none": 0.6580645161290323, "acc_stderr,none": 0.026985289576552746 }, "harness|mmlu_high_school_chemistry|0": { "alias": " - high_school_chemistry", "acc,none": 0.49261083743842365, "acc_stderr,none": 0.03517603540361008 }, "harness|mmlu_high_school_computer_science|0": { "alias": " - high_school_computer_science", "acc,none": 0.64, "acc_stderr,none": 0.04824181513244218 }, "harness|mmlu_high_school_mathematics|0": { "alias": " - high_school_mathematics", "acc,none": 0.34444444444444444, "acc_stderr,none": 0.02897264888484427 }, "harness|mmlu_high_school_physics|0": { "alias": " - high_school_physics", "acc,none": 0.3708609271523179, "acc_stderr,none": 0.03943966699183629 }, "harness|mmlu_high_school_statistics|0": { "alias": " - high_school_statistics", "acc,none": 0.4212962962962963, "acc_stderr,none": 0.03367462138896078 }, "harness|mmlu_machine_learning|0": { "alias": " - machine_learning", "acc,none": 0.45535714285714285, "acc_stderr,none": 0.04726835553719099 }, "harness|truthfulqa:mc2|0": { "acc,none": 0.6730758600725846, "acc_stderr,none": 0.015085602828695083, "alias": "truthfulqa_mc2" }, "harness|arc:challenge|0": { "acc,none": 0.5477815699658704, "acc_stderr,none": 0.014544519880633827, "acc_norm,none": 0.560580204778157, "acc_norm_stderr,none": 0.014503747823580129, "alias": "arc_challenge" }, "harness|arc:easy|0": { "acc,none": 0.8152356902356902, "acc_stderr,none": 0.007963772171570785, "acc_norm,none": 0.7609427609427609, "acc_norm_stderr,none": 0.008751754723580422, "alias": "arc_easy" }, "harness|piqa|0": { "acc,none": 0.7970620239390642, "acc_stderr,none": 
0.009383679003767338, "acc_norm,none": 0.8025027203482046, "acc_norm_stderr,none": 0.009288578108523262, "alias": "piqa" }, "harness|lambada:openai|0": { "perplexity,none": 3.542202154991171, "perplexity_stderr,none": 0.07532470166471553, "acc,none": 0.7110421113914225, "acc_stderr,none": 0.00631505317377688, "alias": "lambada_openai" } }, "task_info": { "model": "TheBloke/Mistral-7B-Instruct-v0.2-GPTQ", "revision": "main", "private": false, "params": 4.16, "architectures": "MistralForCausalLM", "quant_type": "GPTQ", "precision": "4bit", "model_params": 7.04, "model_size": 4.16, "weight_dtype": "int4", "compute_dtype": "float16", "gguf_ftype": "*Q4_0.gguf", "hardware": "gpu", "status": "Pending", "submitted_time": "2024-05-10T05:47:33Z", "model_type": "quantization", "job_id": -1, "job_start_time": null, "scripts": "ITREX" }, "quantization_config": { "bits": 4, "group_size": 128, "damp_percent": 0.1, "desc_act": true, "sym": true, "true_sequential": true, "model_name_or_path": null, "model_file_base_name": "model", "quant_method": "gptq" }, "versions": { "harness|truthfulqa:mc1|0": 2.0, "harness|winogrande|0": 1.0, "harness|hellaswag|0": 1.0, "harness|boolq|0": 2.0, "harness|openbookqa|0": 1.0, "harness|mmlu|0": null, "harness|mmlu_humanities|0": null, "harness|mmlu_formal_logic|0": 0.0, "harness|mmlu_high_school_european_history|0": 0.0, "harness|mmlu_high_school_us_history|0": 0.0, "harness|mmlu_high_school_world_history|0": 0.0, "harness|mmlu_international_law|0": 0.0, "harness|mmlu_jurisprudence|0": 0.0, "harness|mmlu_logical_fallacies|0": 0.0, "harness|mmlu_moral_disputes|0": 0.0, "harness|mmlu_moral_scenarios|0": 0.0, "harness|mmlu_philosophy|0": 0.0, "harness|mmlu_prehistory|0": 0.0, "harness|mmlu_professional_law|0": 0.0, "harness|mmlu_world_religions|0": 0.0, "harness|mmlu_other|0": null, "harness|mmlu_business_ethics|0": 0.0, "harness|mmlu_clinical_knowledge|0": 0.0, "harness|mmlu_college_medicine|0": 0.0, "harness|mmlu_global_facts|0": 0.0, "harness|mmlu_human_aging|0": 0.0, "harness|mmlu_management|0": 0.0, "harness|mmlu_marketing|0": 0.0, "harness|mmlu_medical_genetics|0": 0.0, "harness|mmlu_miscellaneous|0": 0.0, "harness|mmlu_nutrition|0": 0.0, "harness|mmlu_professional_accounting|0": 0.0, "harness|mmlu_professional_medicine|0": 0.0, "harness|mmlu_virology|0": 0.0, "harness|mmlu_social_sciences|0": null, "harness|mmlu_econometrics|0": 0.0, "harness|mmlu_high_school_geography|0": 0.0, "harness|mmlu_high_school_government_and_politics|0": 0.0, "harness|mmlu_high_school_macroeconomics|0": 0.0, "harness|mmlu_high_school_microeconomics|0": 0.0, "harness|mmlu_high_school_psychology|0": 0.0, "harness|mmlu_human_sexuality|0": 0.0, "harness|mmlu_professional_psychology|0": 0.0, "harness|mmlu_public_relations|0": 0.0, "harness|mmlu_security_studies|0": 0.0, "harness|mmlu_sociology|0": 0.0, "harness|mmlu_us_foreign_policy|0": 0.0, "harness|mmlu_stem|0": null, "harness|mmlu_abstract_algebra|0": 0.0, "harness|mmlu_anatomy|0": 0.0, "harness|mmlu_astronomy|0": 0.0, "harness|mmlu_college_biology|0": 0.0, "harness|mmlu_college_chemistry|0": 0.0, "harness|mmlu_college_computer_science|0": 0.0, "harness|mmlu_college_mathematics|0": 0.0, "harness|mmlu_college_physics|0": 0.0, "harness|mmlu_computer_security|0": 0.0, "harness|mmlu_conceptual_physics|0": 0.0, "harness|mmlu_electrical_engineering|0": 0.0, "harness|mmlu_elementary_mathematics|0": 0.0, "harness|mmlu_high_school_biology|0": 0.0, "harness|mmlu_high_school_chemistry|0": 0.0, "harness|mmlu_high_school_computer_science|0": 0.0, 
"harness|mmlu_high_school_mathematics|0": 0.0, "harness|mmlu_high_school_physics|0": 0.0, "harness|mmlu_high_school_statistics|0": 0.0, "harness|mmlu_machine_learning|0": 0.0, "harness|truthfulqa:mc2|0": 2.0, "harness|arc:challenge|0": 1.0, "harness|arc:easy|0": 1.0, "harness|piqa|0": 1.0, "harness|lambada:openai|0": 1.0 }, "n-shot": { "arc_challenge": 0, "arc_easy": 0, "boolq": 0, "hellaswag": 0, "lambada_openai": 0, "mmlu": 0, "mmlu_abstract_algebra": 0, "mmlu_anatomy": 0, "mmlu_astronomy": 0, "mmlu_business_ethics": 0, "mmlu_clinical_knowledge": 0, "mmlu_college_biology": 0, "mmlu_college_chemistry": 0, "mmlu_college_computer_science": 0, "mmlu_college_mathematics": 0, "mmlu_college_medicine": 0, "mmlu_college_physics": 0, "mmlu_computer_security": 0, "mmlu_conceptual_physics": 0, "mmlu_econometrics": 0, "mmlu_electrical_engineering": 0, "mmlu_elementary_mathematics": 0, "mmlu_formal_logic": 0, "mmlu_global_facts": 0, "mmlu_high_school_biology": 0, "mmlu_high_school_chemistry": 0, "mmlu_high_school_computer_science": 0, "mmlu_high_school_european_history": 0, "mmlu_high_school_geography": 0, "mmlu_high_school_government_and_politics": 0, "mmlu_high_school_macroeconomics": 0, "mmlu_high_school_mathematics": 0, "mmlu_high_school_microeconomics": 0, "mmlu_high_school_physics": 0, "mmlu_high_school_psychology": 0, "mmlu_high_school_statistics": 0, "mmlu_high_school_us_history": 0, "mmlu_high_school_world_history": 0, "mmlu_human_aging": 0, "mmlu_human_sexuality": 0, "mmlu_humanities": 0, "mmlu_international_law": 0, "mmlu_jurisprudence": 0, "mmlu_logical_fallacies": 0, "mmlu_machine_learning": 0, "mmlu_management": 0, "mmlu_marketing": 0, "mmlu_medical_genetics": 0, "mmlu_miscellaneous": 0, "mmlu_moral_disputes": 0, "mmlu_moral_scenarios": 0, "mmlu_nutrition": 0, "mmlu_other": 0, "mmlu_philosophy": 0, "mmlu_prehistory": 0, "mmlu_professional_accounting": 0, "mmlu_professional_law": 0, "mmlu_professional_medicine": 0, "mmlu_professional_psychology": 0, "mmlu_public_relations": 0, "mmlu_security_studies": 0, "mmlu_social_sciences": 0, "mmlu_sociology": 0, "mmlu_stem": 0, "mmlu_us_foreign_policy": 0, "mmlu_virology": 0, "mmlu_world_religions": 0, "openbookqa": 0, "piqa": 0, "truthfulqa_mc1": 0, "truthfulqa_mc2": 0, "winogrande": 0 }, "date": 1715428953.698051, "config": { "model": "hf", "model_args": "pretrained=TheBloke/Mistral-7B-Instruct-v0.2-GPTQ,trust_remote_code=True,dtype=float16,_commit_hash=main", "batch_size": 2, "batch_sizes": [], "device": "cuda", "use_cache": null, "limit": null, "bootstrap_iters": 100000, "gen_kwargs": null } }